/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
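
/* For example, a front end expanding the C fragment `if (cond) stmt;'
   would make roughly this sequence of calls, interleaved with its own
   parsing (a sketch, not a normative interface; see the definitions
   of these functions below):

	expand_start_cond (cond, 0);	(emit test of COND, branch if false)
	(expand rtl for STMT here)
	expand_end_cond ();		(define the label branched to)  */
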
#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"

#include "bytecode.h"
#include "bc-typecd.h"
#include "bc-opcode.h"
#include "bc-optab.h"
#include "bc-emit.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars whose address is an offset from the argument pointer
   will be needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif

extern void (*interim_eh_hook)	PROTO((tree));

/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
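
/* For example (a sketch, not real compiler output): for case values
   1, 3, 4, 7 and 9, balancing might make 4 the root, with 1 and 7 as
   its left and right sons and 3 and 9 as their right sons; the
   emitted code then compares the index against 4 first, then against
   1 or 7, and so on down the tree.  */
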
struct case_node
{
  struct case_node	*left;	/* Left son in binary tree */
  struct case_node	*right;	/* Right son in binary tree; also node chain */
  struct case_node	*parent; /* Parent of node in binary tree */
  tree			low;	/* Lowest index value for this label */
  tree			high;	/* Highest index value for this label */
  tree			code_label; /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
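
/* For example (a sketch): a C loop is started with a nonzero
   exit_label, so a `break' inside it can find that label on the
   innermost entry of loop_stack; a plain `{ ... }' binding contour is
   started with exit_label == 0 and is therefore skipped over by
   `break'.  */
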
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  Complemented by
	     bc_stack_level (see below) when generating bytecodes.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0 (was empty_cleanup_list),
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	  /* Bytecode specific: stack level to restore stack to on exit.  */
	  int bc_stack_level;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* For bytecodes, the case table is in-lined right in the code.
	     A label is needed for skipping over this block.  It is only
	     used when generating bytecodes.  */
	  rtx skip_label;
	  /* A list of case labels, kept in ascending order by value
	     as the list is built.
	     During expand_end_case, this list may be rearranged into a
	     nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
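
/* For example (a sketch): the function that ends a loop construct
   finishes with something like

	POPSTACK (loop_stack);

   which frees the loop's `struct nesting' and every construct begun
   inside it, popping the corresponding entries off cond_stack,
   block_stack, etc. on the way.  */
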
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
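
/* For example (a sketch): in the GNU C fragment

	{
	  int v[n];	(variable-size array: the block resets the stack)
	  ...
	  goto lab;	(lab is not yet defined here)
	}
      lab: ;

   the goto must restore the stack pointer on the way out of the
   block, but the right code is not known when the goto is expanded,
   so a fixup is recorded and the stack restore is inserted just
   before the jump once the binding contour is exited.  */
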
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;

  /* Bytecode specific members follow */

  /* The label that this jump is jumping to, or 0 for break, continue
     or return.  */
  struct bc_label *bc_target;

  /* The label we use for the fixup patch */
  struct bc_label *label;

  /* True (non-0) if fixup has been handled */
  int bc_handled:1;

  /* Like stack_level above, except refers to the interpreter stack */
  int bc_stack_level;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};

static void expand_goto_internal	PROTO((tree, rtx, rtx));
static void bc_expand_goto_internal	PROTO((enum bytecode_opcode,
					       struct bc_label *, tree));
static int expand_fixup			PROTO((tree, rtx, rtx));
static void bc_expand_fixup		PROTO((enum bytecode_opcode,
					       struct bc_label *, int));
static void fixup_gotos			PROTO((struct nesting *, rtx, tree,
					       rtx, int));
static void bc_fixup_gotos		PROTO((struct nesting *, int, tree,
					       rtx, int));
static int warn_if_unused_value		PROTO((tree));
static void bc_expand_start_cond	PROTO((tree, int));
static void bc_expand_end_cond		PROTO((void));
static void bc_expand_start_else	PROTO((void));
static void bc_expand_end_loop		PROTO((void));
static void bc_expand_end_bindings	PROTO((tree, int, int));
static void bc_expand_decl		PROTO((tree, tree));
static void bc_expand_variable_local_init PROTO((tree));
static void bc_expand_decl_init		PROTO((tree));
static void expand_null_return_1	PROTO((rtx, int));
static int tail_recursion_args		PROTO((tree, tree));
static void expand_cleanups		PROTO((tree, tree));
static void bc_expand_start_case	PROTO((struct nesting *, tree,
					       tree, char *));
static int bc_pushcase			PROTO((tree, tree));
static void bc_check_for_full_enumeration_handling PROTO((tree));
static void bc_expand_end_case		PROTO((tree));
static void do_jump_if_equal		PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs		PROTO((case_node_ptr));
static void group_case_nodes		PROTO((case_node_ptr));
static void balance_case_nodes		PROTO((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PROTO((case_node_ptr, tree));
static int node_has_high_bound		PROTO((case_node_ptr, tree));
static int node_is_bounded		PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable	PROTO((rtx));
static void emit_case_nodes		PROTO((rtx, case_node_ptr, rtx, tree));

int bc_expand_exit_loop_if_false ();
void bc_expand_start_cond ();
void bc_expand_end_cond ();
void bc_expand_start_else ();
void bc_expand_end_bindings ();
void bc_expand_start_case ();
void bc_check_for_full_enumeration_handling ();
void bc_expand_end_case ();
void bc_expand_decl ();

extern rtx bc_allocate_local ();
extern rtx bc_allocate_variable_array ();

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  if (!output_bytecode)
    {
      last_insn = get_last_insn ();
      if (!optimize
	  && (GET_CODE (last_insn) == CODE_LABEL
	      || prev_real_insn (last_insn) == 0))
	emit_insn (gen_nop ());
    }
}

/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  if (output_bytecode)
    {
      bc_expand_expr (exp);
      bc_emit_instruction (jumpP);
    }
  else
    {
      rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
      emit_queue ();
      emit_indirect_jump (x);
    }
}

/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  if (output_bytecode)
    {
      if (! DECL_RTL (label))
	DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
      if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
	error ("multiply defined label");
      return;
    }

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  if (output_bytecode)
    {
      expand_goto_internal (label, label_rtx (label), NULL_RTX);
      return;
    }

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with hard_frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx, label_ref);
	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  /* NOTICE!  If a bytecode instruction other than `jump' is needed,
     then the caller has to call bc_expand_goto_internal()
     directly.  This is rather an exceptional case, and there aren't
     that many places where this is necessary.  */
  if (output_bytecode)
    {
      bc_expand_goto_internal (jump, BYTECODE_BC_LABEL (label), body);
      return;
    }

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this would clobber
	     the stack pointer.  This one should be deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}

/* Generate a jump with OPCODE to the given bytecode LABEL which is
   found within BODY.  */

static void
bc_expand_goto_internal (opcode, label, body)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     tree body;
{
  struct nesting *block;
  int stack_level = -1;

  /* If the label is defined, adjust the stack as necessary.
     If it's not defined, we have to push the reference on the
     fixup list.  */

  if (label->defined)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing bytecode uids.)  Then restore the
	 outermost stack level within that block.  */

      for (block = block_stack; block; block = block->next)
	{
	  if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)
	    break;
	  if (block->data.block.bc_stack_level)
	    stack_level = block->data.block.bc_stack_level;

	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE);
	      do_pending_stack_adjust ();
	    }
	}

      /* Restore the stack level.  If we need to adjust the stack, we
	 must do so after the jump, since the jump may depend on
	 what's on the stack.  Thus, any stack-modifying conditional
	 jumps (these are the only ones that rely on what's on the
	 stack) go into the fixup list.  */

      if (stack_level >= 0
	  && stack_depth != stack_level
	  && opcode != jump)

	bc_expand_fixup (opcode, label, stack_level);
      else
	{
	  if (stack_level >= 0)
	    bc_adjust_stack (stack_depth - stack_level);

	  if (body && DECL_BIT_FIELD (body))
	    error ("jump to `%s' invalidly jumps into binding contour",
		   IDENTIFIER_POINTER (DECL_NAME (body)));

	  /* Emit immediate jump */
	  bc_emit_bytecode (opcode);
	  bc_emit_bytecode_labelref (label);

#ifdef DEBUG_PRINT_CODE
	  fputc ('\n', stderr);
#endif
	}
    }
  else
    /* Put goto in the fixup list */
    bc_expand_fixup (opcode, label, stack_level);
}

/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	start_sequence ();
	pushlevel (0);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now! */
	end_sequence ();
	emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
   Make the fixup restore the stack level to STACK_LEVEL.  */

static void
bc_expand_fixup (opcode, label, stack_level)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     int stack_level;
{
  struct goto_fixup *fixup
    = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));

  fixup->label = bc_get_bytecode_label ();
  fixup->bc_target = label;
  fixup->bc_stack_level = stack_level;
  fixup->bc_handled = FALSE;

  fixup->next = goto_fixup_chain;
  goto_fixup_chain = fixup;

  /* Insert a jump to the fixup code */
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (fixup->label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  if (output_bytecode)
    {
      /* ??? The second arg is the bc stack level, which is not the same
	 as STACK_LEVEL.  I have no idea what should go here, so I'll
	 just pass 0.  */
      bc_fixup_gotos (thisblock, 0, cleanup_list, first_insn, dont_jump_in);
      return;
    }

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_REGISTER (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_REGISTER (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), 0);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.
	   Also mark the cleanup_list_list element for F
	   that corresponds to this block, so that ultimately
	   this block's cleanups will be executed by the code above.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	   it means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    TREE_ADDRESSABLE (lists) = 1;

	if (stack_level)
	  f->stack_level = stack_level;
      }
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   Note: STACK_DEPTH is not altered.

   The arguments are currently not used in the bytecode compiler, but we may
   need them one day for languages other than C.

   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     int stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;
  int saved_stack_depth;

  /* F is the fixup we are considering; PREV is the previous one.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev)
	    prev->next = f->next;
	}

      /* Emit code to restore the stack and continue */
      bc_emit_bytecode_labeldef (f->label);

      /* Save stack_depth across call, since bc_adjust_stack () will alter
	 the perceived stack depth via the instructions generated.  */

      if (f->bc_stack_level >= 0)
	{
	  saved_stack_depth = stack_depth;
	  bc_adjust_stack (stack_depth - f->bc_stack_level);
	  stack_depth = saved_stack_depth;
	}

      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (f->bc_target);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
    }

  goto_fixup_chain = NULL;
}

/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (output_bytecode)
    {
      error ("`asm' is illegal when generating bytecode");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
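
/* For example (a sketch, with a made-up instruction name): the GNU C
   statement

	asm volatile ("addsi %0,%1,%2" : "=r" (sum) : "r" (a), "r" (b));

   reaches here with STRING "addsi %0,%1,%2", one entry in OUTPUTS
   with constraint "=r", two entries in INPUTS with constraint "r",
   an empty CLOBBERS list, and VOL nonzero because of `volatile'.  */
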
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  if (output_bytecode)
    {
      error ("`asm' is illegal when generating bytecode");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree val1;
      int j;
      int found_equal;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  */

      found_equal = 0;
      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	{
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    {
	      error ("output operand constraint contains `+'");
	      return;
	    }
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
	    found_equal = 1;
	}
      if (! found_equal)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a variable or indirect ref,
	 or a part of one,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
	     || TREE_CODE (val) == ARRAY_REF)
	val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	{
	  TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));
	  /* If it's a constant, print error now so don't crash later.  */
	  if (TREE_CODE (TREE_VALUE (tail)) != SAVE_EXPR)
	    {
	      error ("invalid output in `asm'");
	      return;
	    }
	}

      output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)) );
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;
	  }

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx (CLOBBER, VOIDmode,
			       gen_rtx (MEM, BLKmode,
					gen_rtx (SCRATCH, VOIDmode, 0)));
		  continue;
		}

	      error ("unknown register name `%s' in `asm'", regname);
	      return;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}

/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  if (output_bytecode)
    {
      int org_stack_depth = stack_depth;

      bc_expand_expr (exp);

      /* Restore stack depth */
      if (stack_depth < org_stack_depth)
	abort ();

      bc_emit_instruction (drop);

      last_expr_type = TREE_TYPE (exp);
      return;
    }

  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }
  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp,
				   (expr_stmts_for_value
				    ? NULL_RTX : const0_rtx),
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
	;
      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      NULL_RTX, VOIDmode, 0),
			 BLKmode, 0,
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}

/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */
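
/* For example (a sketch): `f () + 1;' draws this warning, since the
   result of the addition is computed and discarded, while `f ();'
   itself and `(void) (f () + 1);' do not.  */
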
1669 static int
1670 warn_if_unused_value (exp)
1671 tree exp;
1673 if (TREE_USED (exp))
1674 return 0;
1676 switch (TREE_CODE (exp))
1678 case PREINCREMENT_EXPR:
1679 case POSTINCREMENT_EXPR:
1680 case PREDECREMENT_EXPR:
1681 case POSTDECREMENT_EXPR:
1682 case MODIFY_EXPR:
1683 case INIT_EXPR:
1684 case TARGET_EXPR:
1685 case CALL_EXPR:
1686 case METHOD_CALL_EXPR:
1687 case RTL_EXPR:
1688 case WITH_CLEANUP_EXPR:
1689 case EXIT_EXPR:
1690 /* We don't warn about COND_EXPR because it may be a useful
1691 construct if either arm contains a side effect. */
1692 case COND_EXPR:
1693 return 0;
1695 case BIND_EXPR:
1696 /* For a binding, warn if no side effect within it. */
1697 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1699 case TRUTH_ORIF_EXPR:
1700 case TRUTH_ANDIF_EXPR:
1701 /* In && or ||, warn if 2nd operand has no side effect. */
1702 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1704 case COMPOUND_EXPR:
1705 if (TREE_NO_UNUSED_WARNING (exp))
1706 return 0;
1707 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1708 return 1;
1709 /* Let people do `(foo (), 0)' without a warning. */
1710 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1711 return 0;
1712 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1714 case NOP_EXPR:
1715 case CONVERT_EXPR:
1716 case NON_LVALUE_EXPR:
1717 /* Don't warn about values cast to void. */
1718 if (TREE_TYPE (exp) == void_type_node)
1719 return 0;
1720 /* Don't warn about conversions not explicit in the user's program. */
1721 if (TREE_NO_UNUSED_WARNING (exp))
1722 return 0;
1723 /* Assignment to a cast usually results in a cast of a modify.
1724 Don't complain about that. There can be an arbitrary number of
1725 casts before the modify, so we must loop until we find the first
1726 non-cast expression and then test to see if that is a modify. */
1728 tree tem = TREE_OPERAND (exp, 0);
1730 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1731 tem = TREE_OPERAND (tem, 0);
1733 if (TREE_CODE (tem) == MODIFY_EXPR)
1734 return 0;
1736 /* ... fall through ... */
1738 default:
1739 /* Referencing a volatile value is a side effect, so don't warn. */
1740 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1741 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1742 && TREE_THIS_VOLATILE (exp))
1743 return 0;
1744 warning_with_file_and_line (emit_filename, emit_lineno,
1745 "value computed is not used");
1746 return 1;
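/* Illustrative examples (not from this file) of how the cases above
   classify typical C statements:

	x + 1;			-- warns: value computed is not used
	(void) (x + 1);		-- no warning: explicit cast to void
	x++;			-- no warning: side effect
	(foo (), 0);		-- no warning: constant 2nd comma operand  */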
1750 /* Clear out the memory of the last expression evaluated. */
1752 void
1753 clear_last_expr ()
1755 last_expr_type = 0;
1758 /* Begin a statement which will return a value.
1759 Return the RTL_EXPR for this statement expr.
1760 The caller must save that value and pass it to expand_end_stmt_expr. */
1762 tree
1763 expand_start_stmt_expr ()
1765 int momentary;
1766 tree t;
1768 /* When generating bytecode, just note down the stack depth.  */
1769 if (output_bytecode)
1770 return (build_int_2 (stack_depth, 0));
1772 /* Make the RTL_EXPR node temporary, not momentary,
1773 so that rtl_expr_chain doesn't become garbage. */
1774 momentary = suspend_momentary ();
1775 t = make_node (RTL_EXPR);
1776 resume_momentary (momentary);
1777 start_sequence_for_rtl_expr (t);
1778 NO_DEFER_POP;
1779 expr_stmts_for_value++;
1780 return t;
1783 /* Restore the previous state at the end of a statement that returns a value.
1784 Returns a tree node representing the statement's value and the
1785 insns to compute the value.
1787 The nodes of that expression have been freed by now, so we cannot use them.
1788 But we don't want to do that anyway; the expression has already been
1789 evaluated and now we just want to use the value. So generate an RTL_EXPR
1790 with the proper type and RTL value.
1792 If the last substatement was not an expression,
1793 return something with type `void'. */
1795 tree
1796 expand_end_stmt_expr (t)
1797 tree t;
1799 if (output_bytecode)
1801 int i;
1802 tree t;
1805 /* At this point, all expressions have been evaluated in order.
1806 However, all expression values have been popped when evaluated,
1807 which means we have to recover the last expression value. This is
1808 the last value removed by means of a `drop' instruction. Instead
1809 of adding code to inhibit dropping the last expression value, it
1810 is here recovered by undoing the `drop'. Since `drop' is
1811 equivalent to `adjstackSI [1]', it can be undone with `adjstackSI
1812 [-1]'. */
1814 bc_adjust_stack (-1);
1816 if (!last_expr_type)
1817 last_expr_type = void_type_node;
1819 t = make_node (RTL_EXPR);
1820 TREE_TYPE (t) = last_expr_type;
1821 RTL_EXPR_RTL (t) = NULL;
1822 RTL_EXPR_SEQUENCE (t) = NULL;
1824 /* Don't consider deleting this expr or containing exprs at tree level. */
1825 TREE_THIS_VOLATILE (t) = 1;
1827 last_expr_type = 0;
1828 return t;
1831 OK_DEFER_POP;
1833 if (last_expr_type == 0)
1835 last_expr_type = void_type_node;
1836 last_expr_value = const0_rtx;
1838 else if (last_expr_value == 0)
1839 /* There are some cases where this can happen, such as when the
1840 statement is void type. */
1841 last_expr_value = const0_rtx;
1842 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1843 /* Remove any possible QUEUED. */
1844 last_expr_value = protect_from_queue (last_expr_value, 0);
1846 emit_queue ();
1848 TREE_TYPE (t) = last_expr_type;
1849 RTL_EXPR_RTL (t) = last_expr_value;
1850 RTL_EXPR_SEQUENCE (t) = get_insns ();
1852 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1854 end_sequence ();
1856 /* Don't consider deleting this expr or containing exprs at tree level. */
1857 TREE_SIDE_EFFECTS (t) = 1;
1858 /* Propagate volatility of the actual RTL expr. */
1859 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1861 last_expr_type = 0;
1862 expr_stmts_for_value--;
1864 return t;
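/* A minimal sketch (illustrative, not part of this file) of how a front
   end pairs the two functions above when expanding a ({...}) grouping;
   `expand_statements_within' is a hypothetical placeholder for whatever
   statement expansion the front end does in between:

	tree t = expand_start_stmt_expr ();
	expand_statements_within ();
	t = expand_end_stmt_expr (t);

   T is then an RTL_EXPR carrying the grouping's type, value and insns.  */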
1867 /* Generate RTL for the start of an if-then. COND is the expression
1868 whose truth should be tested.
1870 If EXITFLAG is nonzero, this conditional is visible to
1871 `exit_something'. */
1873 void
1874 expand_start_cond (cond, exitflag)
1875 tree cond;
1876 int exitflag;
1878 struct nesting *thiscond = ALLOC_NESTING ();
1880 /* Make an entry on cond_stack for the cond we are entering. */
1882 thiscond->next = cond_stack;
1883 thiscond->all = nesting_stack;
1884 thiscond->depth = ++nesting_depth;
1885 thiscond->data.cond.next_label = gen_label_rtx ();
1886 /* Before we encounter an `else', we don't need a separate exit label
1887 unless there are supposed to be exit statements
1888 to exit this conditional. */
1889 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1890 thiscond->data.cond.endif_label = thiscond->exit_label;
1891 cond_stack = thiscond;
1892 nesting_stack = thiscond;
1894 if (output_bytecode)
1895 bc_expand_start_cond (cond, exitflag);
1896 else
1897 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1900 /* Generate RTL between then-clause and the elseif-clause
1901 of an if-then-elseif-.... */
1903 void
1904 expand_start_elseif (cond)
1905 tree cond;
1907 if (cond_stack->data.cond.endif_label == 0)
1908 cond_stack->data.cond.endif_label = gen_label_rtx ();
1909 emit_jump (cond_stack->data.cond.endif_label);
1910 emit_label (cond_stack->data.cond.next_label);
1911 cond_stack->data.cond.next_label = gen_label_rtx ();
1912 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1915 /* Generate RTL between the then-clause and the else-clause
1916 of an if-then-else. */
1918 void
1919 expand_start_else ()
1921 if (cond_stack->data.cond.endif_label == 0)
1922 cond_stack->data.cond.endif_label = gen_label_rtx ();
1924 if (output_bytecode)
1926 bc_expand_start_else ();
1927 return;
1930 emit_jump (cond_stack->data.cond.endif_label);
1931 emit_label (cond_stack->data.cond.next_label);
1932 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1935 /* Generate RTL for the end of an if-then.
1936 Pop the record for it off of cond_stack. */
1938 void
1939 expand_end_cond ()
1941 struct nesting *thiscond = cond_stack;
1943 if (output_bytecode)
1944 bc_expand_end_cond ();
1945 else
1947 do_pending_stack_adjust ();
1948 if (thiscond->data.cond.next_label)
1949 emit_label (thiscond->data.cond.next_label);
1950 if (thiscond->data.cond.endif_label)
1951 emit_label (thiscond->data.cond.endif_label);
1954 POPSTACK (cond_stack);
1955 last_expr_type = 0;
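/* Sketch (illustrative only) of the calls a parser makes to expand
   `if (a) S1; else if (b) S2; else S3;' with the functions above:

	expand_start_cond (a, 0);
	...expand S1...
	expand_start_elseif (b);
	...expand S2...
	expand_start_else ();
	...expand S3...
	expand_end_cond ();  */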
1959 /* Generate code for the start of an if-then. COND is the expression
1960 whose truth is to be tested; if EXITFLAG is nonzero this conditional
1961 is to be visible to exit_something. It is assumed that the caller
1962 has pushed the previous context on the cond stack. */
1964 static void
1965 bc_expand_start_cond (cond, exitflag)
1966 tree cond;
1967 int exitflag;
1969 struct nesting *thiscond = cond_stack;
1971 thiscond->data.case_stmt.nominal_type = cond;
1972 if (! exitflag)
1973 thiscond->exit_label = gen_label_rtx ();
1974 bc_expand_expr (cond);
1975 bc_emit_bytecode (xjumpifnot);
1976 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
1978 #ifdef DEBUG_PRINT_CODE
1979 fputc ('\n', stderr);
1980 #endif
1983 /* Generate the label for the end of an if with
1984 no else clause. */
1986 static void
1987 bc_expand_end_cond ()
1989 struct nesting *thiscond = cond_stack;
1991 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
1994 /* Generate code for the start of the else clause of
1995 an if-then-else. */
1997 static void
1998 bc_expand_start_else ()
2000 struct nesting *thiscond = cond_stack;
2002 thiscond->data.cond.endif_label = thiscond->exit_label;
2003 thiscond->exit_label = gen_label_rtx ();
2004 bc_emit_bytecode (jump);
2005 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2007 #ifdef DEBUG_PRINT_CODE
2008 fputc ('\n', stderr);
2009 #endif
2011 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
2014 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2015 loop should be exited by `exit_something'. This is a loop for which
2016 `expand_continue' will jump to the top of the loop.
2018 Make an entry on loop_stack to record the labels associated with
2019 this loop. */
2021 struct nesting *
2022 expand_start_loop (exit_flag)
2023 int exit_flag;
2025 register struct nesting *thisloop = ALLOC_NESTING ();
2027 /* Make an entry on loop_stack for the loop we are entering. */
2029 thisloop->next = loop_stack;
2030 thisloop->all = nesting_stack;
2031 thisloop->depth = ++nesting_depth;
2032 thisloop->data.loop.start_label = gen_label_rtx ();
2033 thisloop->data.loop.end_label = gen_label_rtx ();
2034 thisloop->data.loop.alt_end_label = 0;
2035 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2036 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2037 loop_stack = thisloop;
2038 nesting_stack = thisloop;
2040 if (output_bytecode)
2042 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2043 return thisloop;
2046 do_pending_stack_adjust ();
2047 emit_queue ();
2048 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2049 emit_label (thisloop->data.loop.start_label);
2051 return thisloop;
2054 /* Like expand_start_loop but for a loop where the continuation point
2055 (for expand_continue_loop) will be specified explicitly. */
2057 struct nesting *
2058 expand_start_loop_continue_elsewhere (exit_flag)
2059 int exit_flag;
2061 struct nesting *thisloop = expand_start_loop (exit_flag);
2062 loop_stack->data.loop.continue_label = gen_label_rtx ();
2063 return thisloop;
2066 /* Specify the continuation point for a loop started with
2067 expand_start_loop_continue_elsewhere.
2068 Use this at the point in the code to which a continue statement
2069 should jump. */
2071 void
2072 expand_loop_continue_here ()
2074 if (output_bytecode)
2076 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
2077 return;
2079 do_pending_stack_adjust ();
2080 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2081 emit_label (loop_stack->data.loop.continue_label);
2084 /* End a loop. */
2086 static void
2087 bc_expand_end_loop ()
2089 struct nesting *thisloop = loop_stack;
2091 bc_emit_bytecode (jump);
2092 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2094 #ifdef DEBUG_PRINT_CODE
2095 fputc ('\n', stderr);
2096 #endif
2098 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
2099 POPSTACK (loop_stack);
2100 last_expr_type = 0;
2104 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2105 Pop the block off of loop_stack. */
2107 void
2108 expand_end_loop ()
2110 register rtx insn;
2111 register rtx start_label;
2112 rtx last_test_insn = 0;
2113 int num_insns = 0;
2115 if (output_bytecode)
2117 bc_expand_end_loop ();
2118 return;
2121 insn = get_last_insn ();
2122 start_label = loop_stack->data.loop.start_label;
2124 /* Mark the continue-point at the top of the loop if none elsewhere. */
2125 if (start_label == loop_stack->data.loop.continue_label)
2126 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2128 do_pending_stack_adjust ();
2130 /* If optimizing, perhaps reorder the loop. If the loop
2131 starts with a conditional exit, roll that to the end
2132 where it will optimize together with the jump back.
2134 We look for the last conditional branch to the exit that we encounter
2135 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2136 branch to the exit first, use it.
2138 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2139 because moving them is not valid. */
2141 if (optimize
2142 &&
2143 ! (GET_CODE (insn) == JUMP_INSN
2144 && GET_CODE (PATTERN (insn)) == SET
2145 && SET_DEST (PATTERN (insn)) == pc_rtx
2146 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2148 /* Scan insns from the top of the loop looking for a qualified
2149 conditional exit. */
2150 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2151 insn = NEXT_INSN (insn))
2153 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2154 break;
2156 if (GET_CODE (insn) == NOTE
2157 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2158 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2159 break;
2161 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2162 num_insns++;
2164 if (last_test_insn && num_insns > 30)
2165 break;
2167 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2168 && SET_DEST (PATTERN (insn)) == pc_rtx
2169 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2170 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2171 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2172 == loop_stack->data.loop.end_label)
2173 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2174 == loop_stack->data.loop.alt_end_label)))
2175 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2176 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2177 == loop_stack->data.loop.end_label)
2178 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2179 == loop_stack->data.loop.alt_end_label)))))
2180 last_test_insn = insn;
2182 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2183 && GET_CODE (PATTERN (insn)) == SET
2184 && SET_DEST (PATTERN (insn)) == pc_rtx
2185 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2186 && ((XEXP (SET_SRC (PATTERN (insn)), 0)
2187 == loop_stack->data.loop.end_label)
2188 || (XEXP (SET_SRC (PATTERN (insn)), 0)
2189 == loop_stack->data.loop.alt_end_label)))
2190 /* Include BARRIER. */
2191 last_test_insn = NEXT_INSN (insn);
2194 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2196 /* We found one. Move everything from there up
2197 to the end of the loop, and add a jump into the loop
2198 to jump there. */
2199 register rtx newstart_label = gen_label_rtx ();
2200 register rtx start_move = start_label;
2202 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2203 then we want to move this note also. */
2204 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2205 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2206 == NOTE_INSN_LOOP_CONT))
2207 start_move = PREV_INSN (start_move);
2209 emit_label_after (newstart_label, PREV_INSN (start_move));
2210 reorder_insns (start_move, last_test_insn, get_last_insn ());
2211 emit_jump_insn_after (gen_jump (start_label),
2212 PREV_INSN (newstart_label));
2213 emit_barrier_after (PREV_INSN (newstart_label));
2214 start_label = newstart_label;
2218 emit_jump (start_label);
2219 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2220 emit_label (loop_stack->data.loop.end_label);
2222 POPSTACK (loop_stack);
2224 last_expr_type = 0;
2227 /* Generate a jump to the current loop's continue-point.
2228 This is usually the top of the loop, but may be specified
2229 explicitly elsewhere. If not currently inside a loop,
2230 return 0 and do nothing; caller will print an error message. */
2232 int
2233 expand_continue_loop (whichloop)
2234 struct nesting *whichloop;
2236 last_expr_type = 0;
2237 if (whichloop == 0)
2238 whichloop = loop_stack;
2239 if (whichloop == 0)
2240 return 0;
2241 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2242 NULL_RTX);
2243 return 1;
2246 /* Generate a jump to exit the current loop. If not currently inside a loop,
2247 return 0 and do nothing; caller will print an error message. */
2249 int
2250 expand_exit_loop (whichloop)
2251 struct nesting *whichloop;
2253 last_expr_type = 0;
2254 if (whichloop == 0)
2255 whichloop = loop_stack;
2256 if (whichloop == 0)
2257 return 0;
2258 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2259 return 1;
2262 /* Generate a conditional jump to exit the current loop if COND
2263 evaluates to zero. If not currently inside a loop,
2264 return 0 and do nothing; caller will print an error message. */
2266 int
2267 expand_exit_loop_if_false (whichloop, cond)
2268 struct nesting *whichloop;
2269 tree cond;
2271 last_expr_type = 0;
2272 if (whichloop == 0)
2273 whichloop = loop_stack;
2274 if (whichloop == 0)
2275 return 0;
2276 if (output_bytecode)
2278 bc_expand_expr (cond);
2279 bc_expand_goto_internal (xjumpifnot,
2280 BYTECODE_BC_LABEL (whichloop->exit_label),
2281 NULL_TREE);
2283 else
2285 /* In order to handle fixups, we actually create a conditional jump
2286 around an unconditional branch to exit the loop. If fixups are
2287 necessary, they go before the unconditional branch. */
2289 rtx label = gen_label_rtx ();
2290 rtx last_insn;
2292 do_jump (cond, NULL_RTX, label);
2293 last_insn = get_last_insn ();
2294 if (GET_CODE (last_insn) == CODE_LABEL)
2295 whichloop->data.loop.alt_end_label = last_insn;
2296 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2297 NULL_RTX);
2298 emit_label (label);
2301 return 1;
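/* Sketch (illustrative only) of expanding `while (c) BODY;' with the
   loop functions above:

	struct nesting *l = expand_start_loop (1);
	expand_exit_loop_if_false (l, c);
	...expand BODY...
	expand_end_loop ();

   A `continue' inside BODY maps to expand_continue_loop (l) and a
   `break' to expand_exit_loop (l).  */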
2304 /* Return non-zero if we should preserve sub-expressions as separate
2305 pseudos. We never do so if we aren't optimizing. We always do so
2306 if -fexpensive-optimizations.
2308 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2309 the loop may still be a small one. */
2311 int
2312 preserve_subexpressions_p ()
2314 rtx insn;
2316 if (flag_expensive_optimizations)
2317 return 1;
2319 if (optimize == 0 || loop_stack == 0)
2320 return 0;
2322 insn = get_last_insn_anywhere ();
2324 return (insn
2325 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2326 < n_non_fixed_regs * 3));
2330 /* Generate a jump to exit the current loop, conditional, binding contour
2331 or case statement. Not all such constructs are visible to this function,
2332 only those started with EXIT_FLAG nonzero. Individual languages use
2333 the EXIT_FLAG parameter to control which kinds of constructs you can
2334 exit this way.
2336 If not currently inside anything that can be exited,
2337 return 0 and do nothing; caller will print an error message. */
2339 int
2340 expand_exit_something ()
2342 struct nesting *n;
2343 last_expr_type = 0;
2344 for (n = nesting_stack; n; n = n->all)
2345 if (n->exit_label != 0)
2347 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2348 return 1;
2351 return 0;
2354 /* Generate RTL to return from the current function, with no value.
2355 (That is, we do not do anything about returning any value.) */
2357 void
2358 expand_null_return ()
2360 struct nesting *block = block_stack;
2361 rtx last_insn = 0;
2363 if (output_bytecode)
2365 bc_emit_instruction (ret);
2366 return;
2369 /* Does any pending block have cleanups? */
2371 while (block && block->data.block.cleanups == 0)
2372 block = block->next;
2374 /* If yes, use a goto to return, since that runs cleanups. */
2376 expand_null_return_1 (last_insn, block != 0);
2379 /* Generate RTL to return from the current function, with value VAL. */
2381 void
2382 expand_value_return (val)
2383 rtx val;
2385 struct nesting *block = block_stack;
2386 rtx last_insn = get_last_insn ();
2387 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2389 /* Copy the value to the return location
2390 unless it's already there. */
2392 if (return_reg != val)
2394 #ifdef PROMOTE_FUNCTION_RETURN
2395 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2396 int unsignedp = TREE_UNSIGNED (type);
2397 enum machine_mode mode
2398 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2399 &unsignedp, 1);
2401 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2402 convert_move (return_reg, val, unsignedp);
2403 else
2404 #endif
2405 emit_move_insn (return_reg, val);
2407 if (GET_CODE (return_reg) == REG
2408 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2409 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2411 /* Does any pending block have cleanups? */
2413 while (block && block->data.block.cleanups == 0)
2414 block = block->next;
2416 /* If yes, use a goto to return, since that runs cleanups.
2417 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2419 expand_null_return_1 (last_insn, block != 0);
2422 /* Output a return with no value. If LAST_INSN is nonzero,
2423 pretend that the return takes place after LAST_INSN.
2424 If USE_GOTO is nonzero then don't use a return instruction;
2425 go to the return label instead. This causes any cleanups
2426 of pending blocks to be executed normally. */
2428 static void
2429 expand_null_return_1 (last_insn, use_goto)
2430 rtx last_insn;
2431 int use_goto;
2433 rtx end_label = cleanup_label ? cleanup_label : return_label;
2435 clear_pending_stack_adjust ();
2436 do_pending_stack_adjust ();
2437 last_expr_type = 0;
2439 /* PCC-struct return always uses an epilogue. */
2440 if (current_function_returns_pcc_struct || use_goto)
2442 if (end_label == 0)
2443 end_label = return_label = gen_label_rtx ();
2444 expand_goto_internal (NULL_TREE, end_label, last_insn);
2445 return;
2448 /* Otherwise output a simple return-insn if one is available,
2449 unless it won't do the job. */
2450 #ifdef HAVE_return
2451 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2453 emit_jump_insn (gen_return ());
2454 emit_barrier ();
2455 return;
2457 #endif
2459 /* Otherwise jump to the epilogue. */
2460 expand_goto_internal (NULL_TREE, end_label, last_insn);
2463 /* Generate RTL to evaluate the expression RETVAL and return it
2464 from the current function. */
2466 void
2467 expand_return (retval)
2468 tree retval;
2470 /* If there are any cleanups to be performed, then they will
2471 be inserted following LAST_INSN. It is desirable
2472 that the last_insn, for such purposes, should be the
2473 last insn before computing the return value. Otherwise, cleanups
2474 which call functions can clobber the return value. */
2475 /* ??? rms: I think that is erroneous, because in C++ it would
2476 run destructors on variables that might be used in the subsequent
2477 computation of the return value. */
2478 rtx last_insn = 0;
2479 register rtx val = 0;
2480 register rtx op0;
2481 tree retval_rhs;
2482 int cleanups;
2483 struct nesting *block;
2485 /* Bytecode returns are quite simple, just leave the result on the
2486 arithmetic stack. */
2487 if (output_bytecode)
2489 bc_expand_expr (retval);
2490 bc_emit_instruction (ret);
2491 return;
2494 /* If function wants no value, give it none. */
2495 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2497 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2498 emit_queue ();
2499 expand_null_return ();
2500 return;
2503 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2504 cleanups = any_pending_cleanups (1);
2506 if (TREE_CODE (retval) == RESULT_DECL)
2507 retval_rhs = retval;
2508 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2509 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2510 retval_rhs = TREE_OPERAND (retval, 1);
2511 else if (TREE_TYPE (retval) == void_type_node)
2512 /* Recognize tail-recursive call to void function. */
2513 retval_rhs = retval;
2514 else
2515 retval_rhs = NULL_TREE;
2517 /* Only use `last_insn' if there are cleanups which must be run. */
2518 if (cleanups || cleanup_label != 0)
2519 last_insn = get_last_insn ();
2521 /* Distribute return down conditional expr if either of the sides
2522 may involve tail recursion (see test below). This enhances the number
2523 of tail recursions we see. Don't always do this, since it can produce
2524 sub-optimal code in some cases, and we distribute assignments into
2525 conditional expressions when it would help. */
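/* For example (illustrative), `return p ? f () : g ();' is treated here
   as if it were `if (p) return f (); else return g ();', so that each
   arm gets its own shot at the tail-recursion test below.  */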
2527 if (optimize && retval_rhs != 0
2528 && frame_offset == 0
2529 && TREE_CODE (retval_rhs) == COND_EXPR
2530 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2531 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2533 rtx label = gen_label_rtx ();
2534 tree expr;
2536 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2537 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2538 DECL_RESULT (current_function_decl),
2539 TREE_OPERAND (retval_rhs, 1));
2540 TREE_SIDE_EFFECTS (expr) = 1;
2541 expand_return (expr);
2542 emit_label (label);
2544 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2545 DECL_RESULT (current_function_decl),
2546 TREE_OPERAND (retval_rhs, 2));
2547 TREE_SIDE_EFFECTS (expr) = 1;
2548 expand_return (expr);
2549 return;
2552 /* For tail-recursive call to current function,
2553 just jump back to the beginning.
2554 It's unsafe if any auto variable in this function
2555 has its address taken; for simplicity,
2556 require stack frame to be empty. */
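/* Illustrative case: in `int f (int n) { return n ? f (n - 1) : 0; }'
   the recursive arm turns into a jump back to tail_recursion_label
   once tail_recursion_args has stored the new argument values.  */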
2557 if (optimize && retval_rhs != 0
2558 && frame_offset == 0
2559 && TREE_CODE (retval_rhs) == CALL_EXPR
2560 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2561 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2562 /* Finish checking validity, and if valid emit code
2563 to set the argument variables for the new call. */
2564 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2565 DECL_ARGUMENTS (current_function_decl)))
2567 if (tail_recursion_label == 0)
2569 tail_recursion_label = gen_label_rtx ();
2570 emit_label_after (tail_recursion_label,
2571 tail_recursion_reentry);
2573 emit_queue ();
2574 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2575 emit_barrier ();
2576 return;
2578 #ifdef HAVE_return
2579 /* This optimization is safe if there are local cleanups
2580 because expand_null_return takes care of them.
2581 ??? I think it should also be safe when there is a cleanup label,
2582 because expand_null_return takes care of them, too.
2583 Any reason why not? */
2584 if (HAVE_return && cleanup_label == 0
2585 && ! current_function_returns_pcc_struct
2586 && BRANCH_COST <= 1)
2588 /* If this is return x == y; then generate
2589 if (x == y) return 1; else return 0;
2590 if we can do it with explicit return insns and
2591 branches are cheap. */
2592 if (retval_rhs)
2593 switch (TREE_CODE (retval_rhs))
2595 case EQ_EXPR:
2596 case NE_EXPR:
2597 case GT_EXPR:
2598 case GE_EXPR:
2599 case LT_EXPR:
2600 case LE_EXPR:
2601 case TRUTH_ANDIF_EXPR:
2602 case TRUTH_ORIF_EXPR:
2603 case TRUTH_AND_EXPR:
2604 case TRUTH_OR_EXPR:
2605 case TRUTH_NOT_EXPR:
2606 case TRUTH_XOR_EXPR:
2607 op0 = gen_label_rtx ();
2608 jumpifnot (retval_rhs, op0);
2609 expand_value_return (const1_rtx);
2610 emit_label (op0);
2611 expand_value_return (const0_rtx);
2612 return;
2615 #endif /* HAVE_return */
2617 if (cleanups
2618 && retval_rhs != 0
2619 && TREE_TYPE (retval_rhs) != void_type_node
2620 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2622 /* Calculate the return value into a pseudo reg. */
2623 val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2624 emit_queue ();
2625 /* All temporaries have now been used. */
2626 free_temp_slots ();
2627 /* Return the calculated value, doing cleanups first. */
2628 expand_value_return (val);
2630 else
2632 /* No cleanups or no hard reg used;
2633 calculate value into hard return reg. */
2634 expand_expr (retval, const0_rtx, VOIDmode, 0);
2635 emit_queue ();
2636 free_temp_slots ();
2637 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2641 /* Return 1 if the end of the generated RTX is not a barrier.
2642 This means code already compiled can drop through. */
2644 int
2645 drop_through_at_end_p ()
2647 rtx insn = get_last_insn ();
2648 while (insn && GET_CODE (insn) == NOTE)
2649 insn = PREV_INSN (insn);
2650 return insn && GET_CODE (insn) != BARRIER;
2653 /* Emit code to alter this function's formal parms for a tail-recursive call.
2654 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2655 FORMALS is the chain of decls of formals.
2656 Return 1 if this can be done;
2657 otherwise return 0 and do not emit any code. */
2659 static int
2660 tail_recursion_args (actuals, formals)
2661 tree actuals, formals;
2663 register tree a = actuals, f = formals;
2664 register int i;
2665 register rtx *argvec;
2667 /* Check that number and types of actuals are compatible
2668 with the formals. This is not always true in valid C code.
2669 Also check that no formal needs to be addressable
2670 and that all formals are scalars. */
2672 /* Also count the args. */
2674 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2676 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2677 return 0;
2678 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2679 return 0;
2681 if (a != 0 || f != 0)
2682 return 0;
2684 /* Compute all the actuals. */
2686 argvec = (rtx *) alloca (i * sizeof (rtx));
2688 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2689 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2691 /* Find which actual values refer to current values of previous formals.
2692 Copy each of them now, before any formal is changed. */
2694 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2696 int copy = 0;
2697 register int j;
2698 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2699 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2700 { copy = 1; break; }
2701 if (copy)
2702 argvec[i] = copy_to_reg (argvec[i]);
2705 /* Store the values of the actuals into the formals. */
2707 for (f = formals, a = actuals, i = 0; f;
2708 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2710 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2711 emit_move_insn (DECL_RTL (f), argvec[i]);
2712 else
2713 convert_move (DECL_RTL (f), argvec[i],
2714 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2717 free_temp_slots ();
2718 return 1;
2721 /* Generate the RTL code for entering a binding contour.
2722 The variables are declared one by one, by calls to `expand_decl'.
2724 EXIT_FLAG is nonzero if this construct should be visible to
2725 `exit_something'. */
2727 void
2728 expand_start_bindings (exit_flag)
2729 int exit_flag;
2731 struct nesting *thisblock = ALLOC_NESTING ();
2732 rtx note = output_bytecode ? 0 : emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2734 /* Make an entry on block_stack for the block we are entering. */
2736 thisblock->next = block_stack;
2737 thisblock->all = nesting_stack;
2738 thisblock->depth = ++nesting_depth;
2739 thisblock->data.block.stack_level = 0;
2740 thisblock->data.block.cleanups = 0;
2741 thisblock->data.block.function_call_count = 0;
2742 #if 0
2743 if (block_stack)
2745 if (block_stack->data.block.cleanups == NULL_TREE
2746 && (block_stack->data.block.outer_cleanups == NULL_TREE
2747 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2748 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2749 else
2750 thisblock->data.block.outer_cleanups
2751 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2752 block_stack->data.block.outer_cleanups);
2754 else
2755 thisblock->data.block.outer_cleanups = 0;
2756 #endif
2757 #if 1
2758 if (block_stack
2759 && !(block_stack->data.block.cleanups == NULL_TREE
2760 && block_stack->data.block.outer_cleanups == NULL_TREE))
2761 thisblock->data.block.outer_cleanups
2762 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2763 block_stack->data.block.outer_cleanups);
2764 else
2765 thisblock->data.block.outer_cleanups = 0;
2766 #endif
2767 thisblock->data.block.label_chain = 0;
2768 thisblock->data.block.innermost_stack_block = stack_block_stack;
2769 thisblock->data.block.first_insn = note;
2770 thisblock->data.block.block_start_count = ++block_start_count;
2771 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2772 block_stack = thisblock;
2773 nesting_stack = thisblock;
2775 if (!output_bytecode)
2777 /* Make a new level for allocating stack slots. */
2778 push_temp_slots ();
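/* Sketch (illustrative only) of how a front end brackets a `{...}'
   block with the binding-contour functions in this file:

	expand_start_bindings (0);
	expand_decl (decl);		-- once per local variable
	expand_decl_init (decl);
	...expand the statements...
	expand_end_bindings (decls, 1, 0);  */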
2782 /* Given a pointer to a BLOCK node, save a pointer to the most recently
2783 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
2784 BLOCK node. */
2786 void
2787 remember_end_note (block)
2788 register tree block;
2790 BLOCK_END_NOTE (block) = last_block_end_note;
2791 last_block_end_note = NULL_RTX;
2794 /* Generate RTL code to terminate a binding contour.
2795 VARS is the chain of VAR_DECL nodes
2796 for the variables bound in this contour.
2797 MARK_ENDS is nonzero if we should put a note at the beginning
2798 and end of this binding contour.
2800 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2801 (That is true automatically if the contour has a saved stack level.) */
2803 void
2804 expand_end_bindings (vars, mark_ends, dont_jump_in)
2805 tree vars;
2806 int mark_ends;
2807 int dont_jump_in;
2809 register struct nesting *thisblock = block_stack;
2810 register tree decl;
2812 if (output_bytecode)
2814 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
2815 return;
2818 if (warn_unused)
2819 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2820 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
2821 && ! DECL_IN_SYSTEM_HEADER (decl))
2822 warning_with_decl (decl, "unused variable `%s'");
2824 if (thisblock->exit_label)
2826 do_pending_stack_adjust ();
2827 emit_label (thisblock->exit_label);
2830 /* If necessary, make a handler for nonlocal gotos taking
2831 place in the function calls in this block. */
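/* The situation handled below, in illustrative GNU C (not from this
   file): given

	int f () { __label__ l; int g () { goto l; } ... l: ...; }

   a call to g made from inside this block must reach a handler that
   fixes up the frame and arg pointers before dispatching to l.  */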
2832 if (function_call_count != thisblock->data.block.function_call_count
2833 && nonlocal_labels
2834 /* Make handler for outermost block
2835 if there were any nonlocal gotos to this function. */
2836 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2837 /* Make handler for inner block if it has something
2838 special to do when you jump out of it. */
2839 : (thisblock->data.block.cleanups != 0
2840 || thisblock->data.block.stack_level != 0)))
2842 tree link;
2843 rtx afterward = gen_label_rtx ();
2844 rtx handler_label = gen_label_rtx ();
2845 rtx save_receiver = gen_reg_rtx (Pmode);
2846 rtx insns;
2848 /* Don't let jump_optimize delete the handler. */
2849 LABEL_PRESERVE_P (handler_label) = 1;
2851 /* Record the handler address in the stack slot for that purpose,
2852 during this block, saving and restoring the outer value. */
2853 if (thisblock->next != 0)
2855 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2857 start_sequence ();
2858 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
2859 insns = get_insns ();
2860 end_sequence ();
2861 emit_insns_before (insns, thisblock->data.block.first_insn);
2864 start_sequence ();
2865 emit_move_insn (nonlocal_goto_handler_slot,
2866 gen_rtx (LABEL_REF, Pmode, handler_label));
2867 insns = get_insns ();
2868 end_sequence ();
2869 emit_insns_before (insns, thisblock->data.block.first_insn);
2871 /* Jump around the handler; it runs only when specially invoked. */
2872 emit_jump (afterward);
2873 emit_label (handler_label);
2875 #ifdef HAVE_nonlocal_goto
2876 if (! HAVE_nonlocal_goto)
2877 #endif
2878 /* First adjust our frame pointer to its actual value. It was
2879 previously set to the start of the virtual area corresponding to
2880 the stacked variables when we branched here and now needs to be
2881 adjusted to the actual hardware fp value.
2883 Assignments to virtual registers are converted by
2884 instantiate_virtual_regs into the corresponding assignment
2885 to the underlying register (fp in this case) that makes
2886 the original assignment true.
2887 So the following insn will actually be
2888 decrementing fp by STARTING_FRAME_OFFSET. */
2889 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
2891 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2892 if (fixed_regs[ARG_POINTER_REGNUM])
2894 #ifdef ELIMINABLE_REGS
2895 /* If the argument pointer can be eliminated in favor of the
2896 frame pointer, we don't need to restore it. We assume here
2897 that if such an elimination is present, it can always be used.
2898 This is the case on all known machines; if we don't make this
2899 assumption, we do unnecessary saving on many machines. */
2900 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
2901 int i;
2903 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
2904 if (elim_regs[i].from == ARG_POINTER_REGNUM
2905 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
2906 break;
2908 if (i == sizeof elim_regs / sizeof elim_regs [0])
2909 #endif
2911 /* Now restore our arg pointer from the address at which it
2912 was saved in our stack frame.
2913 If there hasn't been space allocated for it yet, make
2914 some now. */
2915 if (arg_pointer_save_area == 0)
2916 arg_pointer_save_area
2917 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
2918 emit_move_insn (virtual_incoming_args_rtx,
2919 /* We need a pseudo here, or else
2920 instantiate_virtual_regs_1 complains. */
2921 copy_to_reg (arg_pointer_save_area));
2924 #endif
2926 /* The handler expects the desired label address in the static chain
2927 register. It tests the address and does an appropriate jump
2928 to whatever label is desired. */
2929 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
2930 /* Skip any labels we shouldn't be able to jump to from here. */
2931 if (! DECL_TOO_LATE (TREE_VALUE (link)))
2933 rtx not_this = gen_label_rtx ();
2934 rtx this = gen_label_rtx ();
2935 do_jump_if_equal (static_chain_rtx,
2936 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
2937 this, 0);
2938 emit_jump (not_this);
2939 emit_label (this);
2940 expand_goto (TREE_VALUE (link));
2941 emit_label (not_this);
2943 /* If label is not recognized, abort. */
2944 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
2945 VOIDmode, 0);
2946 emit_label (afterward);
2949 /* Don't allow jumping into a block that has cleanups or a stack level. */
2950 if (dont_jump_in
2951 || thisblock->data.block.stack_level != 0
2952 || thisblock->data.block.cleanups != 0)
2954 struct label_chain *chain;
2956 /* Any labels in this block are no longer valid to go to.
2957 Mark them to cause an error message. */
2958 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2960 DECL_TOO_LATE (chain->label) = 1;
2961 /* If any goto without a fixup came to this label,
2962 that must be an error, because gotos without fixups
2963 come from outside all saved stack-levels and all cleanups. */
2964 if (TREE_ADDRESSABLE (chain->label))
2965 error_with_decl (chain->label,
2966 "label `%s' used before containing binding contour");
2970 /* Restore stack level in effect before the block
2971 (only if variable-size objects allocated). */
2972 /* Perform any cleanups associated with the block. */
2974 if (thisblock->data.block.stack_level != 0
2975 || thisblock->data.block.cleanups != 0)
2977 /* Only clean up here if this point can actually be reached. */
2978 if (GET_CODE (get_last_insn ()) != BARRIER)
2980 /* Don't let cleanups affect ({...}) constructs. */
2981 int old_expr_stmts_for_value = expr_stmts_for_value;
2982 rtx old_last_expr_value = last_expr_value;
2983 tree old_last_expr_type = last_expr_type;
2984 expr_stmts_for_value = 0;
2986 /* Do the cleanups. */
2987 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE);
2988 do_pending_stack_adjust ();
2990 expr_stmts_for_value = old_expr_stmts_for_value;
2991 last_expr_value = old_last_expr_value;
2992 last_expr_type = old_last_expr_type;
2994 /* Restore the stack level. */
2996 if (thisblock->data.block.stack_level != 0)
2998 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2999 thisblock->data.block.stack_level, NULL_RTX);
3000 if (nonlocal_goto_handler_slot != 0)
3001 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3002 NULL_RTX);
3006 /* Any gotos out of this block must also do these things.
3007 Also report any gotos with fixups that came to labels in this
3008 level. */
3009 fixup_gotos (thisblock,
3010 thisblock->data.block.stack_level,
3011 thisblock->data.block.cleanups,
3012 thisblock->data.block.first_insn,
3013 dont_jump_in);
3016 /* Mark the beginning and end of the scope if requested.
3017 We do this now, after running cleanups on the variables
3018 just going out of scope, so they are in scope for their cleanups. */
3020 if (mark_ends)
3021 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3022 else
3023 /* Get rid of the beginning-mark if we don't make an end-mark. */
3024 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3026 /* If doing stupid register allocation, make sure lives of all
3027 register variables declared here extend thru end of scope. */
3029 if (obey_regdecls)
3030 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3032 rtx rtl = DECL_RTL (decl);
3033 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3034 use_variable (rtl);
3037 /* Restore block_stack level for containing block. */
3039 stack_block_stack = thisblock->data.block.innermost_stack_block;
3040 POPSTACK (block_stack);
3042 /* Pop the stack slot nesting and free any slots at this level. */
3043 pop_temp_slots ();
3047 /* End a binding contour.
3048 VARS is the chain of VAR_DECL nodes for the variables bound
3049 in this contour. MARK_ENDS is nonzero if we should put a note
3050 at the beginning and end of this binding contour.
3051 DONT_JUMP_IN is nonzero if it is not valid to jump into this
3052 contour. */
3054 static void
3055 bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3056 tree vars;
3057 int mark_ends;
3058 int dont_jump_in;
3060 struct nesting *thisbind = nesting_stack;
3061 tree decl;
3063 if (warn_unused)
3064 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3065 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
3066 warning_with_decl (decl, "unused variable `%s'");
3068 if (thisbind->exit_label)
3069 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));
3071 /* Pop block/bindings off stack */
3072 POPSTACK (block_stack);
3075 /* Generate RTL for the automatic variable declaration DECL.
3076 (Other kinds of declarations are simply ignored if seen here.)
3077 CLEANUP is an expression to be executed at exit from this binding contour;
3078 for example, in C++, it might call the destructor for this variable.
3080 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3081 either before or after calling `expand_decl' but before compiling
3082 any subsequent expressions. This is because CLEANUP may be expanded
3083 more than once, on different branches of execution.
3084 For the same reason, CLEANUP may not contain a CALL_EXPR
3085 except as its topmost node--else `preexpand_calls' would get confused.
3087 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3088 that is not associated with any particular variable.
3090 There is no special support here for C++ constructors.
3091 They should be handled by the proper code in DECL_INITIAL. */
3093 void
3094 expand_decl (decl)
3095 register tree decl;
3097 struct nesting *thisblock = block_stack;
3098 tree type;
3100 if (output_bytecode)
3102 bc_expand_decl (decl, 0);
3103 return;
3106 type = TREE_TYPE (decl);
3108 /* Only automatic variables need any expansion done.
3109 Static and external variables, and external functions,
3110 will be handled by `assemble_variable' (called from finish_decl).
3111 TYPE_DECL and CONST_DECL require nothing.
3112 PARM_DECLs are handled in `assign_parms'. */
3114 if (TREE_CODE (decl) != VAR_DECL)
3115 return;
3116 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3117 return;
3119 /* Create the RTL representation for the variable. */
3121 if (type == error_mark_node)
3122 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3123 else if (DECL_SIZE (decl) == 0)
3124 /* Variable with incomplete type. */
3126 if (DECL_INITIAL (decl) == 0)
3127 /* Error message was already done; now avoid a crash. */
3128 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3129 else
3130 /* An initializer is going to decide the size of this array.
3131 Until we know the size, represent its address with a reg. */
3132 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3134 else if (DECL_MODE (decl) != BLKmode
3135 /* If -ffloat-store, don't put explicit float vars
3136 into regs. */
3137 && !(flag_float_store
3138 && TREE_CODE (type) == REAL_TYPE)
3139 && ! TREE_THIS_VOLATILE (decl)
3140 && ! TREE_ADDRESSABLE (decl)
3141 && (DECL_REGISTER (decl) || ! obey_regdecls))
3143 /* Automatic variable that can go in a register. */
3144 int unsignedp = TREE_UNSIGNED (type);
3145 enum machine_mode reg_mode
3146 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3148 if (TREE_CODE (type) == COMPLEX_TYPE)
3150 rtx realpart, imagpart;
3151 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (type));
3153 /* For a complex type variable, make a CONCAT of two pseudos
3154 so that the real and imaginary parts
3155 can be allocated separately. */
3156 realpart = gen_reg_rtx (partmode);
3157 REG_USERVAR_P (realpart) = 1;
3158 imagpart = gen_reg_rtx (partmode);
3159 REG_USERVAR_P (imagpart) = 1;
3160 DECL_RTL (decl) = gen_rtx (CONCAT, reg_mode, realpart, imagpart);
3162 else
3164 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3165 if (TREE_CODE (type) == POINTER_TYPE)
3166 mark_reg_pointer (DECL_RTL (decl));
3167 REG_USERVAR_P (DECL_RTL (decl)) = 1;
3170 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
3172 /* Variable of fixed size that goes on the stack. */
3173 rtx oldaddr = 0;
3174 rtx addr;
3176 /* If we previously made RTL for this decl, it must be an array
3177 whose size was determined by the initializer.
3178 The old address was a register; set that register now
3179 to the proper address. */
3180 if (DECL_RTL (decl) != 0)
3182 if (GET_CODE (DECL_RTL (decl)) != MEM
3183 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3184 abort ();
3185 oldaddr = XEXP (DECL_RTL (decl), 0);
3188 DECL_RTL (decl)
3189 = assign_stack_temp (DECL_MODE (decl),
3190 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3191 + BITS_PER_UNIT - 1)
3192 / BITS_PER_UNIT),
3193 1);
3195 /* Set alignment we actually gave this decl. */
3196 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3197 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3199 if (oldaddr)
3201 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3202 if (addr != oldaddr)
3203 emit_move_insn (oldaddr, addr);
3206 /* If this is a memory ref that contains aggregate components,
3207 mark it as such for cse and loop optimize. */
3208 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3209 #if 0
3210 /* If this is in memory because of -ffloat-store,
3211 set the volatile bit, to prevent optimizations from
3212 undoing the effects. */
3213 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3214 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3215 #endif
3217 else
3218 /* Dynamic-size object: must push space on the stack. */
3220 rtx address, size;
3222 /* Record the stack pointer on entry to block, if have
3223 not already done so. */
3224 if (thisblock->data.block.stack_level == 0)
3226 do_pending_stack_adjust ();
3227 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3228 &thisblock->data.block.stack_level,
3229 thisblock->data.block.first_insn);
3230 stack_block_stack = thisblock;
3233 /* Compute the variable's size, in bytes. */
3234 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3235 DECL_SIZE (decl),
3236 size_int (BITS_PER_UNIT)),
3237 NULL_RTX, VOIDmode, 0);
3238 free_temp_slots ();
3240 /* Allocate space on the stack for the variable. */
3241 address = allocate_dynamic_stack_space (size, NULL_RTX,
3242 DECL_ALIGN (decl));
3244 /* Reference the variable indirect through that rtx. */
3245 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
3247 /* If this is a memory ref that contains aggregate components,
3248 mark it as such for cse and loop optimize. */
3249 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3251 /* Indicate the alignment we actually gave this variable. */
3252 #ifdef STACK_BOUNDARY
3253 DECL_ALIGN (decl) = STACK_BOUNDARY;
3254 #else
3255 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3256 #endif
3259 if (TREE_THIS_VOLATILE (decl))
3260 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3261 #if 0 /* A variable is not necessarily unchanging
3262 just because it is const. RTX_UNCHANGING_P
3263 means no change in the function,
3264 not merely no change in the variable's scope.
3265 It is correct to set RTX_UNCHANGING_P if the variable's scope
3266 is the whole function. There's no convenient way to test that. */
3267 if (TREE_READONLY (decl))
3268 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3269 #endif
3271 /* If doing stupid register allocation, make sure life of any
3272 register variable starts here, at the start of its scope. */
3274 if (obey_regdecls)
3275 use_variable (DECL_RTL (decl));
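/* Summary by example (illustrative) of the RTL chosen above for C
   automatic variables:

	int i;			-- pseudo REG (if ! obey_regdecls)
	int a[10];		-- fixed-size stack slot (MEM)
	int b[n];		-- allocate_dynamic_stack_space
	volatile int v;		-- stack slot with MEM_VOLATILE_P set  */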
3279 /* Generate code for the automatic variable declaration DECL. For
3280 most variables this just means we give it a stack offset. The
3281 compiler sometimes emits cleanups without variables and we will
3282 have to deal with those too. */
3284 static void
3285 bc_expand_decl (decl, cleanup)
3286 tree decl;
3287 tree cleanup;
3289 tree type;
3291 if (!decl)
3293 /* A cleanup with no variable. */
3294 if (!cleanup)
3295 abort ();
3297 return;
3300 /* Only auto variables need any work. */
3301 if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3302 return;
3304 type = TREE_TYPE (decl);
3306 if (type == error_mark_node)
3307 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3309 else if (DECL_SIZE (decl) == 0)
3311 /* Variable with incomplete type. The stack offset herein will be
3312 fixed later in expand_decl_init (). */
3313 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3315 else if (TREE_CONSTANT (DECL_SIZE (decl)))
3317 DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
3318 DECL_ALIGN (decl));
3320 else
3321 DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
3324 /* Emit code to perform the initialization of a declaration DECL. */
3326 void
3327 expand_decl_init (decl)
3328 tree decl;
3330 int was_used = TREE_USED (decl);
3332 if (output_bytecode)
3334 bc_expand_decl_init (decl);
3335 return;
3338 /* If this is a CONST_DECL, we don't have to generate any code, but
3339 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3340 to be set while in the obstack containing the constant. If we don't
3341 do this, we can lose if we have functions nested three deep and the middle
3342 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3343 the innermost function is the first to expand that STRING_CST. */
3344 if (TREE_CODE (decl) == CONST_DECL)
3346 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3347 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3348 EXPAND_INITIALIZER);
3349 return;
3352 if (TREE_STATIC (decl))
3353 return;
3355 /* Compute and store the initial value now. */
3357 if (DECL_INITIAL (decl) == error_mark_node)
3359 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3360 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3361 || code == POINTER_TYPE)
3362 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3363 0, 0);
3364 emit_queue ();
3366 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3368 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3369 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3370 emit_queue ();
3373 /* Don't let the initialization count as "using" the variable. */
3374 TREE_USED (decl) = was_used;
3376 /* Free any temporaries we made while initializing the decl. */
3377 free_temp_slots ();
3380 /* Expand initialization for variable-sized types. Allocate array
3381 using newlocalSI and set local variable, which is a pointer to the
3382 storage. */
3384 static void
3385 bc_expand_variable_local_init (decl)
3386 tree decl;
3388 /* Evaluate size expression and coerce to SI */
3389 bc_expand_expr (DECL_SIZE (decl));
3391 /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
3392 no coercion is necessary (?) */
3394 /* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
3395 TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */
3397 /* Emit code to allocate array */
3398 bc_emit_instruction (newlocalSI);
3400 /* Store array pointer in local variable. This is the only instance
3401 where we actually want the address of the pointer to the
3402 variable-size block, rather than the pointer itself. We avoid
3403 using expand_address() since that would cause the pointer to be
3404 pushed rather than its address. Hence the hard-coded reference;
3405 notice also that the variable is always local (no global
3406 variable-size type variables). */
3408 bc_load_localaddr (DECL_RTL (decl));
3409 bc_emit_instruction (storeP);
3413 /* Emit code to initialize a declaration. */
3415 static void
3416 bc_expand_decl_init (decl)
3417 tree decl;
3419 int org_stack_depth;
3421 /* Static initializers are handled elsewhere */
3423 if (TREE_STATIC (decl))
3424 return;
3426 /* Remember original stack depth */
3427 org_stack_depth = stack_depth;
3429 /* If the type is variable-size, we first create its space (we ASSUME
3430 it CAN'T be static). We do this regardless of whether there's an
3431 initializer assignment or not. */
3433 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
3434 bc_expand_variable_local_init (decl);
3436 /* Expand initializer assignment */
3437 if (DECL_INITIAL (decl) == error_mark_node)
3439 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3441 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3442 || code == POINTER_TYPE)
3444 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3446 else if (DECL_INITIAL (decl))
3447 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3449 /* Restore stack depth */
3450 if (org_stack_depth > stack_depth)
3451 abort ();
3453 bc_adjust_stack (stack_depth - org_stack_depth);
3457 /* CLEANUP is an expression to be executed at exit from this binding contour;
3458 for example, in C++, it might call the destructor for this variable.
3460 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3461 either before or after calling `expand_decl' but before compiling
3462 any subsequent expressions. This is because CLEANUP may be expanded
3463 more than once, on different branches of execution.
3464 For the same reason, CLEANUP may not contain a CALL_EXPR
3465 except as its topmost node--else `preexpand_calls' would get confused.
3467 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3468 that is not associated with any particular variable. */
3470 int
3471 expand_decl_cleanup (decl, cleanup)
3472 tree decl, cleanup;
3474 struct nesting *thisblock = block_stack;
3476 /* Error if we are not in any block. */
3477 if (thisblock == 0)
3478 return 0;
3480 /* Record the cleanup if there is one. */
3482 if (cleanup != 0)
3484 thisblock->data.block.cleanups
3485 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3486 /* If this block has a cleanup, it belongs in stack_block_stack. */
3487 stack_block_stack = thisblock;
3488 (*interim_eh_hook) (NULL_TREE);
3490 return 1;
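/* Sketch (illustrative only): a C++ front end expanding

	{ String s; ... }

   calls expand_decl on S and then expand_decl_cleanup (s, d), where D
   is a tree for `s.~String ()'; the recorded cleanup is then run by
   expand_end_bindings and by any goto or return leaving the block.  */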
3493 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3494 DECL_ELTS is the list of elements that belong to DECL's type.
3495 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3497 void
3498 expand_anon_union_decl (decl, cleanup, decl_elts)
3499 tree decl, cleanup, decl_elts;
3501 struct nesting *thisblock = block_stack;
3502 rtx x;
3504 expand_decl (decl);
3505 x = DECL_RTL (decl);
3507 while (decl_elts)
3509 tree decl_elt = TREE_VALUE (decl_elts);
3510 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3511 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3513 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3514 instead create a new MEM rtx with the proper mode. */
3515 if (GET_CODE (x) == MEM)
3517 if (mode == GET_MODE (x))
3518 DECL_RTL (decl_elt) = x;
3519 else
3521 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
3522 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3523 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3526 else if (GET_CODE (x) == REG)
3528 if (mode == GET_MODE (x))
3529 DECL_RTL (decl_elt) = x;
3530 else
3531 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
3533 else
3534 abort ();
3536 /* Record the cleanup if there is one. */
3538 if (cleanup != 0)
3539 thisblock->data.block.cleanups
3540 = temp_tree_cons (decl_elt, cleanup_elt,
3541 thisblock->data.block.cleanups);
3543 decl_elts = TREE_CHAIN (decl_elts);
3547 /* Expand a list of cleanups LIST.
3548 Elements may be expressions or may be nested lists.
3550 If DONT_DO is nonnull, then any list-element
3551 whose TREE_PURPOSE matches DONT_DO is omitted.
3552 This is sometimes used to avoid a cleanup associated with
3553 a value that is being returned out of the scope. */
static void
expand_cleanups (list, dont_do)
     tree list;
     tree dont_do;
{
  tree tail;
  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
      {
	if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
	  expand_cleanups (TREE_VALUE (tail), dont_do);
	else
	  {
	    (*interim_eh_hook) (TREE_VALUE (tail));

	    /* Cleanups may be run multiple times.  For example,
	       when exiting a binding contour, we expand the
	       cleanups associated with that contour.  When a goto
	       within that binding contour has a target outside that
	       contour, it will expand all cleanups from its scope to
	       the target.  Though the cleanups are expanded multiple
	       times, the control paths are non-overlapping so the
	       cleanups will not be executed twice.  */
	    expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
	    free_temp_slots ();
	  }
      }
}
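/* For example (illustrative only): if LIST is

	((d1 . c1) (nil . ((d2 . c2) (d3 . c3))))

   where each TREE_PURPOSE is a decl and each TREE_VALUE is either a
   cleanup expression or a nested TREE_LIST, the loop above expands c1
   and then recurses to expand c2 and c3; any element whose TREE_PURPOSE
   equals DONT_DO is skipped.  */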
/* Move all cleanups from the current block_stack
   to the containing block_stack, where they are assumed to
   have been created.  If anything can cause a temporary to
   be created, but not expanded for more than one level of
   block_stacks, then this code will have to change.  */
void
move_cleanups_up ()
{
  struct nesting *block = block_stack;
  struct nesting *outer = block->next;

  outer->data.block.cleanups
    = chainon (block->data.block.cleanups,
	       outer->data.block.cleanups);
  block->data.block.cleanups = 0;
}
tree
last_cleanup_this_contour ()
{
  if (block_stack == 0)
    return 0;

  return block_stack->data.block.cleanups;
}
/* Return 1 if there are any pending cleanups at this point.
   If THIS_CONTOUR is nonzero, check the current contour as well.
   Otherwise, look only at the contours that enclose this one.  */

int
any_pending_cleanups (this_contour)
     int this_contour;
{
  struct nesting *block;

  if (block_stack == 0)
    return 0;

  if (this_contour && block_stack->data.block.cleanups != NULL)
    return 1;

  if (block_stack->data.block.cleanups == 0
      && (block_stack->data.block.outer_cleanups == 0
#if 0
	  || block_stack->data.block.outer_cleanups == empty_cleanup_list
#endif
	  ))
    return 0;

  for (block = block_stack->next; block; block = block->next)
    if (block->data.block.cleanups != 0)
      return 1;

  return 0;
}
/* Enter a case (Pascal) or switch (C) statement.
   Push a block onto case_stack and nesting_stack
   to accumulate the case-labels that are seen
   and to record the labels generated for the statement.

   EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
   Otherwise, this construct is transparent for `exit_something'.

   EXPR is the index-expression to be dispatched on.
   TYPE is its nominal type.  We could simply convert EXPR to this type,
   but instead we take short cuts.  */
void
expand_start_case (exit_flag, expr, type, printname)
     int exit_flag;
     tree expr;
     tree type;
     char *printname;
{
  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the case we are entering.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.index_expr = expr;
  thiscase->data.case_stmt.nominal_type = type;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  thiscase->data.case_stmt.printname = printname;
  thiscase->data.case_stmt.seenlabel = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;

  if (output_bytecode)
    {
      bc_expand_start_case (thiscase, expr, type, printname);
      return;
    }

  do_pending_stack_adjust ();

  /* Make sure case_stmt.start points to something that won't
     need any transformation before expand_end_case.  */
  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);

  thiscase->data.case_stmt.start = get_last_insn ();
}
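/* Illustrative protocol sketch (not part of the compiler): to expand
   `switch (e) { case 1: ... default: ... }' a front end does, roughly:

	expand_start_case (1, e, TREE_TYPE (e), "switch statement");
	...
	pushcase (value, converter, label, &duplicate);   (once per label)
	...
	expand_end_case (e);

   where `converter' is whatever conversion function the language uses.  */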
/* Enter a case statement.  It is assumed that the caller has pushed
   the current context onto the case stack.  */
static void
bc_expand_start_case (thiscase, expr, type, printname)
     struct nesting *thiscase;
     tree expr;
     tree type;
     char *printname;
{
  bc_expand_expr (expr);
  bc_expand_conversion (TREE_TYPE (expr), type);

  /* For cases, the skip is a place we jump to that's emitted after
     the size of the jump table is known.  */

  thiscase->data.case_stmt.skip_label = gen_label_rtx ();
  bc_emit_bytecode (jump);
  bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Start a "dummy case statement" within which case labels are invalid
   and are not connected to any larger real case statement.
   This can be used if you don't want to let a case statement jump
   into the middle of certain kinds of constructs.  */
void
expand_start_case_dummy ()
{
  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the dummy.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.start = 0;
  thiscase->data.case_stmt.nominal_type = 0;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;
}
/* End a dummy case statement.  */

void
expand_end_case_dummy ()
{
  POPSTACK (case_stack);
}
/* Return the data type of the index-expression
   of the innermost case statement, or null if none.  */

tree
case_index_expr_type ()
{
  if (case_stack)
    return TREE_TYPE (case_stack->data.case_stmt.index_expr);
  return 0;
}
/* Accumulate one case or default label inside a case or switch statement.
   VALUE is the value of the case (a null pointer, for a default label).
   The function CONVERTER, when applied to arguments T and V,
   converts the value V to the type T.

   If not currently inside a case or switch statement, return 1 and do
   nothing.  The caller will print a language-specific error message.
   If VALUE is a duplicate or overlaps, return 2 and do nothing
   except store the (first) duplicate node in *DUPLICATE.
   If VALUE is out of range, return 3 and do nothing.
   If we are jumping into the scope of a cleanup or var-sized array, return 5.
   Return 0 on success.

   Extended to handle range statements.  */
int
pushcase (value, converter, label, duplicate)
     register tree value;
     tree (*converter) PROTO((tree, tree));
     register tree label;
     tree *duplicate;
{
  register struct case_node **l;
  register struct case_node *n;
  tree index_type;
  tree nominal_type;

  if (output_bytecode)
    return bc_pushcase (value, label);

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* Convert VALUE to the type in which the comparisons are nominally done.  */
  if (value != 0)
    value = (*converter) (nominal_type, value);

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)
    {
      rtx insn;
      for (insn = case_stack->data.case_stmt.start;
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    break;
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	    {
	      warning ("unreachable code at beginning of %s",
		       case_stack->data.case_stmt.printname);
	      break;
	    }
	}
    }
  case_stack->data.case_stmt.seenlabel = 1;

  /* Fail if this value is out of range for the actual type of the index
     (which may be narrower than NOMINAL_TYPE).  */
  if (value != 0 && ! int_fits_type_p (value, index_type))
    return 3;

  /* Fail if this is a duplicate or overlaps another entry.  */
  if (value == 0)
    {
      if (case_stack->data.case_stmt.default_label != 0)
	{
	  *duplicate = case_stack->data.case_stmt.default_label;
	  return 2;
	}
      case_stack->data.case_stmt.default_label = label;
    }
  else
    {
      /* Find the elt in the chain before which to insert the new value,
	 to keep the chain sorted in increasing order.
	 But report an error if this element is a duplicate.  */
      for (l = &case_stack->data.case_stmt.case_list;
	   /* Keep going past elements distinctly less than VALUE.  */
	   *l != 0 && tree_int_cst_lt ((*l)->high, value);
	   l = &(*l)->right)
	;
      if (*l)
	{
	  /* Element we will insert before must be distinctly greater;
	     overlap means error.  */
	  if (! tree_int_cst_lt (value, (*l)->low))
	    {
	      *duplicate = (*l)->code_label;
	      return 2;
	    }
	}

      /* Add this label to the chain, and succeed.
	 Copy VALUE so it is on temporary rather than momentary
	 obstack and will thus survive till the end of the case statement.  */
      n = (struct case_node *) oballoc (sizeof (struct case_node));
      n->left = 0;
      n->right = *l;
      n->high = n->low = copy_node (value);
      n->code_label = label;
      *l = n;
    }

  expand_label (label);
  return 0;
}
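/* Illustrative caller (not part of the compiler): a front end maps the
   return codes above to its own diagnostics, along these lines:

	switch (pushcase (value, converter, label, &duplicate))
	  {
	  case 1: error ("case label not within a switch statement"); break;
	  case 2: error ("duplicate case value"); break;
	  case 3: error ("case value out of range"); break;
	  case 5: error ("case label within scope of cleanup or variable array"); break;
	  }

   The exact wording of the diagnostics is the front end's business.  */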
/* Like pushcase but this case applies to all values
   between VALUE1 and VALUE2 (inclusive).
   The return value is the same as that of pushcase
   but there is one additional error code:
   4 means the specified range was empty.  */
int
pushcase_range (value1, value2, converter, label, duplicate)
     register tree value1, value2;
     tree (*converter) PROTO((tree, tree));
     register tree label;
     tree *duplicate;
{
  register struct case_node **l;
  register struct case_node *n;
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)
    {
      rtx insn;
      for (insn = case_stack->data.case_stmt.start;
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    break;
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	    {
	      warning ("unreachable code at beginning of %s",
		       case_stack->data.case_stmt.printname);
	      break;
	    }
	}
    }
  case_stack->data.case_stmt.seenlabel = 1;

  /* Convert VALUEs to type in which the comparisons are nominally done.  */
  if (value1 == 0) /* Negative infinity.  */
    value1 = TYPE_MIN_VALUE (index_type);
  value1 = (*converter) (nominal_type, value1);

  if (value2 == 0) /* Positive infinity.  */
    value2 = TYPE_MAX_VALUE (index_type);
  value2 = (*converter) (nominal_type, value2);

  /* Fail if these values are out of range.  */
  if (! int_fits_type_p (value1, index_type))
    return 3;

  if (! int_fits_type_p (value2, index_type))
    return 3;

  /* Fail if the range is empty.  */
  if (tree_int_cst_lt (value2, value1))
    return 4;

  /* If the bounds are equal, turn this into the one-value case.  */
  if (tree_int_cst_equal (value1, value2))
    return pushcase (value1, converter, label, duplicate);

  /* Find the elt in the chain before which to insert the new value,
     to keep the chain sorted in increasing order.
     But report an error if this element is a duplicate.  */
  for (l = &case_stack->data.case_stmt.case_list;
       /* Keep going past elements distinctly less than this range.  */
       *l != 0 && tree_int_cst_lt ((*l)->high, value1);
       l = &(*l)->right)
    ;
  if (*l)
    {
      /* Element we will insert before must be distinctly greater;
	 overlap means error.  */
      if (! tree_int_cst_lt (value2, (*l)->low))
	{
	  *duplicate = (*l)->code_label;
	  return 2;
	}
    }

  /* Add this label to the chain, and succeed.
     Copy VALUE1, VALUE2 so they are on temporary rather than momentary
     obstack and will thus survive till the end of the case statement.  */

  n = (struct case_node *) oballoc (sizeof (struct case_node));
  n->left = 0;
  n->right = *l;
  n->low = copy_node (value1);
  n->high = copy_node (value2);
  n->code_label = label;
  *l = n;

  expand_label (label);

  case_stack->data.case_stmt.num_ranges++;

  return 0;
}
/* Accumulate one case or default label; VALUE is the value of the
   case, or nil for a default label.  If not currently inside a case,
   return 1 and do nothing.  If VALUE is a duplicate or overlaps, return
   2 and do nothing.  If VALUE is out of range, return 3 and do nothing.
   Return 0 on success.  This function is a leftover from the earlier
   bytecode compiler, which was based on gcc 1.37.  It should be
   merged into pushcase.  */
static int
bc_pushcase (value, label)
     tree value;
     tree label;
{
  struct nesting *thiscase = case_stack;
  struct case_node *case_label, *new_label;

  if (! thiscase)
    return 1;

  /* Fail if duplicate, overlap, or out of type range.  */
  if (value)
    {
      value = convert (thiscase->data.case_stmt.nominal_type, value);
      if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
	return 3;

      for (case_label = thiscase->data.case_stmt.case_list;
	   case_label->left; case_label = case_label->left)
	if (! tree_int_cst_lt (case_label->left->high, value))
	  break;

      if ((case_label != thiscase->data.case_stmt.case_list
	   && ! tree_int_cst_lt (case_label->high, value))
	  || (case_label->left
	      && ! tree_int_cst_lt (value, case_label->left->low)))
	return 2;

      new_label = (struct case_node *) oballoc (sizeof (struct case_node));
      new_label->low = new_label->high = copy_node (value);
      new_label->code_label = label;
      new_label->left = case_label->left;

      case_label->left = new_label;
      thiscase->data.case_stmt.num_ranges++;
    }
  else
    {
      if (thiscase->data.case_stmt.default_label)
	return 2;
      thiscase->data.case_stmt.default_label = label;
    }

  expand_label (label);
  return 0;
}
/* Called when the index of a switch statement is an enumerated type
   and there is no default label.

   Checks that all enumeration literals are covered by the case
   expressions of a switch.  Also, warn if there are any extra
   switch cases that are *not* elements of the enumerated type.

   If all enumeration literals were covered by the case expressions,
   turn one of the expressions into the default expression since it should
   not be possible to fall through such a switch.  */
void
check_for_full_enumeration_handling (type)
     tree type;
{
  register struct case_node *n;
  register struct case_node **l;
  register tree chain;
  int all_values = 1;

  if (output_bytecode)
    {
      bc_check_for_full_enumeration_handling (type);
      return;
    }

  /* The time complexity of this loop is currently O(N * M), with
     N being the number of members in the enumerated type, and
     M being the number of case expressions in the switch.  */

  for (chain = TYPE_VALUES (type);
       chain;
       chain = TREE_CHAIN (chain))
    {
      /* Find a match between enumeral and case expression, if possible.
	 Quit looking when we've gone too far (since case expressions
	 are kept sorted in ascending order).  Warn about enumerators not
	 handled in the switch statement case expression list.  */

      for (n = case_stack->data.case_stmt.case_list;
	   n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
	   n = n->right)
	;

      if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))
	{
	  if (warn_switch)
	    warning ("enumeration value `%s' not handled in switch",
		     IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
	  all_values = 0;
	}
    }

  /* Now we go the other way around; we warn if there are case
     expressions that don't correspond to enumerators.  This can
     occur since C and C++ don't enforce type-checking of
     assignments to enumeration variables.  */

  if (warn_switch)
    for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
      {
	for (chain = TYPE_VALUES (type);
	     chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
	     chain = TREE_CHAIN (chain))
	  ;

	if (!chain)
	  {
	    if (TYPE_NAME (type) == 0)
	      warning ("case value `%d' not in enumerated type",
		       TREE_INT_CST_LOW (n->low));
	    else
	      warning ("case value `%d' not in enumerated type `%s'",
		       TREE_INT_CST_LOW (n->low),
		       IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
					    == IDENTIFIER_NODE)
					   ? TYPE_NAME (type)
					   : DECL_NAME (TYPE_NAME (type))));
	  }

	if (!tree_int_cst_equal (n->low, n->high))
	  {
	    for (chain = TYPE_VALUES (type);
		 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
		 chain = TREE_CHAIN (chain))
	      ;

	    if (!chain)
	      {
		if (TYPE_NAME (type) == 0)
		  warning ("case value `%d' not in enumerated type",
			   TREE_INT_CST_LOW (n->high));
		else
		  warning ("case value `%d' not in enumerated type `%s'",
			   TREE_INT_CST_LOW (n->high),
			   IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
						== IDENTIFIER_NODE)
					       ? TYPE_NAME (type)
					       : DECL_NAME (TYPE_NAME (type))));
	      }
	  }
      }

#if 0
  /* ??? This optimization is disabled because it causes valid programs to
     fail.  ANSI C does not guarantee that an expression with enum type
     will have a value that is the same as one of the enumeration literals.  */

  /* If all values were found as case labels, make one of them the default
     label.  Thus, this switch will never fall through.  We arbitrarily pick
     the last one to make the default since this is likely the most
     efficient choice.  */

  if (all_values)
    {
      for (l = &case_stack->data.case_stmt.case_list;
	   (*l)->right != 0;
	   l = &(*l)->right)
	;

      case_stack->data.case_stmt.default_label = (*l)->code_label;
      *l = 0;
    }
#endif /* 0 */
}
/* Check that all enumeration literals are covered by the case
   expressions of a switch.  Also warn if there are any cases
   that are not elements of the enumerated type.  */
static void
bc_check_for_full_enumeration_handling (type)
     tree type;
{
  struct nesting *thiscase = case_stack;
  struct case_node *c;
  tree e;

  /* Check for enums not handled.  */
  for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
    {
      for (c = thiscase->data.case_stmt.case_list->left;
	   c && tree_int_cst_lt (c->high, TREE_VALUE (e));
	   c = c->left)
	;
      if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
	warning ("enumerated value `%s' not handled in switch",
		 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
    }

  /* Check for cases not in the enumeration.  */
  for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
    {
      for (e = TYPE_VALUES (type);
	   e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
	   e = TREE_CHAIN (e))
	;
      if (! e)
	warning ("case value `%d' not in enumerated type `%s'",
		 TREE_INT_CST_LOW (c->low),
		 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
				     ? TYPE_NAME (type)
				     : DECL_NAME (TYPE_NAME (type))));
    }
}
/* Terminate a case (Pascal) or switch (C) statement
   in which ORIG_INDEX is the expression to be tested.
   Generate the code to test it and jump to the right place.  */
void
expand_end_case (orig_index)
     tree orig_index;
{
  tree minval, maxval, range, orig_minval;
  rtx default_label = 0;
  register struct case_node *n;
  int count;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  register int i;
  rtx before_case;
  register struct nesting *thiscase = case_stack;
  tree index_expr, index_type;
  int unsignedp;

  if (output_bytecode)
    {
      bc_expand_end_case (orig_index);
      return;
    }

  table_label = gen_label_rtx ();
  index_expr = thiscase->data.case_stmt.index_expr;
  index_type = TREE_TYPE (index_expr);
  unsignedp = TREE_UNSIGNED (index_type);

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (index_type != error_mark_node)
    {
      /* If switch expression was an enumerated type, check that all
	 enumeration literals are covered by the cases.
	 No sense trying this if there's a default case, however.  */

      if (!thiscase->data.case_stmt.default_label
	  && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
	  && TREE_CODE (index_expr) != INTEGER_CST)
	check_for_full_enumeration_handling (TREE_TYPE (orig_index));

      /* If this is the first label, warn if any insns have been emitted.  */
      if (thiscase->data.case_stmt.seenlabel == 0)
	{
	  rtx insn;
	  for (insn = get_last_insn ();
	       insn != case_stack->data.case_stmt.start;
	       insn = PREV_INSN (insn))
	    if (GET_CODE (insn) != NOTE
		&& (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	      {
		warning ("unreachable code at beginning of %s",
			 case_stack->data.case_stmt.printname);
		break;
	      }
	}

      /* If we don't have a default-label, create one here,
	 after the body of the switch.  */
      if (thiscase->data.case_stmt.default_label == 0)
	{
	  thiscase->data.case_stmt.default_label
	    = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	  expand_label (thiscase->data.case_stmt.default_label);
	}
      default_label = label_rtx (thiscase->data.case_stmt.default_label);
      before_case = get_last_insn ();

      /* Simplify the case-list before we count it.  */
      group_case_nodes (thiscase->data.case_stmt.case_list);

      /* Get upper and lower bounds of case values.
	 Also convert all the case values to the index expr's data type.  */

      count = 0;
      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	{
	  /* Check low and high label values are integers.  */
	  if (TREE_CODE (n->low) != INTEGER_CST)
	    abort ();
	  if (TREE_CODE (n->high) != INTEGER_CST)
	    abort ();

	  n->low = convert (index_type, n->low);
	  n->high = convert (index_type, n->high);

	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */
	  if (count++ == 0)
	    {
	      minval = n->low;
	      maxval = n->high;
	    }
	  else
	    {
	      if (INT_CST_LT (n->low, minval))
		minval = n->low;
	      if (INT_CST_LT (maxval, n->high))
		maxval = n->high;
	    }
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;
	}

      orig_minval = minval;

      /* Compute span of values.  */
      if (count != 0)
	range = fold (build (MINUS_EXPR, index_type, maxval, minval));

      if (count == 0)
	{
	  expand_expr (index_expr, const0_rtx, VOIDmode, 0);
	  emit_queue ();
	  emit_jump (default_label);
	}

      /* If range of values is much bigger than number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */
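      /* Worked example (illustrative only): five single-valued case
	 labels spanning the values 0 through 100 give count = 5 and
	 range = 100; since 100 > 10 * 5, a dispatch table would be
	 mostly default entries, so the compare-and-branch sequence is
	 chosen instead.  */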
#ifndef CASE_VALUES_THRESHOLD
#ifdef HAVE_casesi
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#else
/* If machine does not have a case insn that compares the
   bounds, this means extra overhead for dispatch tables
   which raises the threshold for using them.  */
#define CASE_VALUES_THRESHOLD 5
#endif /* HAVE_casesi */
#endif /* CASE_VALUES_THRESHOLD */
      else if (TREE_INT_CST_HIGH (range) != 0
	       || count < CASE_VALUES_THRESHOLD
	       || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
		   > 10 * count)
	       || TREE_CODE (index_expr) == INTEGER_CST
	       /* These will reduce to a constant.  */
	       || (TREE_CODE (index_expr) == CALL_EXPR
		   && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
		   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
		   && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
	       || (TREE_CODE (index_expr) == COMPOUND_EXPR
		   && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
	{
	  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

	  /* If the index is a short or char that we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
		  == CODE_FOR_nothing))
	    {
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (cmp_optab->handlers[(int) wider_mode].insn_code
		    != CODE_FOR_nothing)
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  emit_queue ();
	  do_pending_stack_adjust ();

	  index = protect_from_queue (index, 0);
	  if (GET_CODE (index) == MEM)
	    index = copy_to_reg (index);
	  if (GET_CODE (index) == CONST_INT
	      || TREE_CODE (index_expr) == INTEGER_CST)
	    {
	      /* Make a tree node with the proper constant value
		 if we don't already have one.  */
	      if (TREE_CODE (index_expr) != INTEGER_CST)
		{
		  index_expr
		    = build_int_2 (INTVAL (index),
				   unsignedp || INTVAL (index) >= 0 ? 0 : -1);
		  index_expr = convert (index_type, index_expr);
		}

	      /* For constant index expressions we need only
		 issue an unconditional branch to the appropriate
		 target code.  The job of removing any unreachable
		 code is left to the optimization phase if the
		 "-O" option is specified.  */
	      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
		if (! tree_int_cst_lt (index_expr, n->low)
		    && ! tree_int_cst_lt (n->high, index_expr))
		  break;

	      if (n)
		emit_jump (label_rtx (n->code_label));
	      else
		emit_jump (default_label);
	    }
	  else
	    {
	      /* If the index expression is not constant we generate
		 a binary decision tree to select the appropriate
		 target code.  This is done as follows:

		 The list of cases is rearranged into a binary tree,
		 nearly optimal assuming equal probability for each case.

		 The tree is transformed into RTL, eliminating
		 redundant test conditions at the same time.

		 If program flow could reach the end of the
		 decision tree an unconditional jump to the
		 default code is emitted.  */

	      use_cost_table
		= (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
		   && estimate_case_costs (thiscase->data.case_stmt.case_list));
	      balance_case_nodes (&thiscase->data.case_stmt.case_list,
				  NULL_PTR);
	      emit_case_nodes (index, thiscase->data.case_stmt.case_list,
			       default_label, index_type);
	      emit_jump_if_reachable (default_label);
	    }
	}
      else
	{
	  int win = 0;
#ifdef HAVE_casesi
	  if (HAVE_casesi)
	    {
	      enum machine_mode index_mode = SImode;
	      int index_bits = GET_MODE_BITSIZE (index_mode);
	      rtx op1, op2;
	      enum machine_mode op_mode;

	      /* Convert the index to SImode.  */
	      if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
		  > GET_MODE_BITSIZE (index_mode))
		{
		  enum machine_mode omode = TYPE_MODE (index_type);
		  rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

		  /* We must handle the endpoints in the original mode.  */
		  index_expr = build (MINUS_EXPR, index_type,
				      index_expr, minval);
		  minval = integer_zero_node;
		  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
		  emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
		  emit_jump_insn (gen_bltu (default_label));
		  /* Now we can safely truncate.  */
		  index = convert_to_mode (index_mode, index, 0);
		}
	      else
		{
		  if (TYPE_MODE (index_type) != index_mode)
		    {
		      index_expr = convert (type_for_size (index_bits, 0),
					    index_expr);
		      index_type = TREE_TYPE (index_expr);
		    }

		  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
		}
	      emit_queue ();
	      index = protect_from_queue (index, 0);
	      do_pending_stack_adjust ();

	      op_mode = insn_operand_mode[(int) CODE_FOR_casesi][0];
	      if (! (*insn_operand_predicate[(int) CODE_FOR_casesi][0])
		  (index, op_mode))
		index = copy_to_mode_reg (op_mode, index);

	      op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

	      op_mode = insn_operand_mode[(int) CODE_FOR_casesi][1];
	      if (! (*insn_operand_predicate[(int) CODE_FOR_casesi][1])
		  (op1, op_mode))
		op1 = copy_to_mode_reg (op_mode, op1);

	      op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

	      op_mode = insn_operand_mode[(int) CODE_FOR_casesi][2];
	      if (! (*insn_operand_predicate[(int) CODE_FOR_casesi][2])
		  (op2, op_mode))
		op2 = copy_to_mode_reg (op_mode, op2);

	      emit_jump_insn (gen_casesi (index, op1, op2,
					  table_label, default_label));
	      win = 1;
	    }
#endif
#ifdef HAVE_tablejump
	  if (! win && HAVE_tablejump)
	    {
	      index_expr = convert (thiscase->data.case_stmt.nominal_type,
				    fold (build (MINUS_EXPR, index_type,
						 index_expr, minval)));
	      index_type = TREE_TYPE (index_expr);
	      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
	      emit_queue ();
	      index = protect_from_queue (index, 0);
	      do_pending_stack_adjust ();

	      do_tablejump (index, TYPE_MODE (index_type),
			    expand_expr (range, NULL_RTX, VOIDmode, 0),
			    table_label, default_label);
	      win = 1;
	    }
#endif
	  if (! win)
	    abort ();
	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = TREE_INT_CST_LOW (range) + 1;
	  labelvec = (rtx *) alloca (ncases * sizeof (rtx));
	  bzero ((char *) labelvec, ncases * sizeof (rtx));

	  for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	    {
	      register HOST_WIDE_INT i
		= TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);

	      while (1)
		{
		  labelvec[i]
		    = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
		  if (i + TREE_INT_CST_LOW (orig_minval)
		      == TREE_INT_CST_LOW (n->high))
		    break;
		  i++;
		}
	    }

	  /* Fill in the gaps with the default.  */
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);

	  /* Output the table.  */
	  emit_label (table_label);

	  /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
	     were an expression, instead of an #ifdef/#ifndef.  */
	  if (
#ifdef CASE_VECTOR_PC_RELATIVE
	      1 ||
#endif
	      flag_pic)
	    emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
				     gen_rtx (LABEL_REF, Pmode, table_label),
				     gen_rtvec_v (ncases, labelvec)));
	  else
	    emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
				     gen_rtvec_v (ncases, labelvec)));

	  /* If the case insn drops through the table,
	     after the table we must jump to the default-label.
	     Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
	  emit_jump (default_label);
#else
	  emit_barrier ();
#endif
	}

      before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
      reorder_insns (before_case, get_last_insn (),
		     thiscase->data.case_stmt.start);
    }

  if (thiscase->exit_label)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);

  free_temp_slots ();
}
/* Terminate a case statement.  EXPR is the original index
   expression.  */
static void
bc_expand_end_case (expr)
     tree expr;
{
  struct nesting *thiscase = case_stack;
  enum bytecode_opcode opcode;
  struct bc_label *jump_label;
  struct case_node *c;

  bc_emit_bytecode (jump);
  bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif

  /* Now that the size of the jump table is known, emit the actual
     indexed jump instruction.  */
  bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));

  opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
    ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
    : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;

  bc_emit_bytecode (opcode);

  /* Now emit the case instruction's literal arguments, in order.
     In addition to the value on the stack, it uses:
     1.  The address of the jump table.
     2.  The size of the jump table.
     3.  The default label.  */

  jump_label = bc_get_bytecode_label ();
  bc_emit_bytecode_labelref (jump_label);
  bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
			  sizeof thiscase->data.case_stmt.num_ranges);

  if (thiscase->data.case_stmt.default_label)
    bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)));
  else
    bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));

  /* Output the jump table.  */

  bc_align_bytecode (3 /* PTR_ALIGN */);
  bc_emit_bytecode_labeldef (jump_label);

  if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
    for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
      {
	opcode = TREE_INT_CST_LOW (c->low);
	bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);

	opcode = TREE_INT_CST_LOW (c->high);
	bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);

	bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
      }
  else if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
    for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
      {
	bc_emit_bytecode_DI_const (c->low);
	bc_emit_bytecode_DI_const (c->high);

	bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
      }
  else
    /* Bad mode.  */
    abort ();

  bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));

  /* Possibly issue enumeration warnings.  */

  if (!thiscase->data.case_stmt.default_label
      && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
      && TREE_CODE (expr) != INTEGER_CST
      && warn_switch)
    check_for_full_enumeration_handling (TREE_TYPE (expr));

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif

  POPSTACK (case_stack);
}
/* Return unique bytecode ID.  */

int
bc_new_uid ()
{
  static int bc_uid = 0;

  return (++bc_uid);
}
/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */

static void
do_jump_if_equal (op1, op2, label, unsignedp)
     rtx op1, op2, label;
     int unsignedp;
{
  if (GET_CODE (op1) == CONST_INT
      && GET_CODE (op2) == CONST_INT)
    {
      if (INTVAL (op1) == INTVAL (op2))
	emit_jump (label);
    }
  else
    {
      enum machine_mode mode = GET_MODE (op1);
      if (mode == VOIDmode)
	mode = GET_MODE (op2);
      emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
      emit_jump_insn (gen_beq (label));
    }
}
/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

	chars above space:	16
	digits:			16
	default:		12
	space, punct:		8
	tab:			4
	newline:		2
	other "\" chars:	1
	remaining chars:	0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch as scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */
static int
estimate_case_costs (node)
     case_node_ptr node;
{
  tree min_ascii = build_int_2 (-1, -1);
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (cost_table == NULL)
    {
      cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
      bzero ((char *) (cost_table - 1), 129 * sizeof (short));

      for (i = 0; i < 128; i++)
	{
	  if (isalnum (i))
	    cost_table[i] = 16;
	  else if (ispunct (i))
	    cost_table[i] = 8;
	  else if (iscntrl (i))
	    cost_table[i] = -1;
	}

      cost_table[' '] = 8;
      cost_table['\t'] = 4;
      cost_table['\0'] = 4;
      cost_table['\n'] = 2;
      cost_table['\f'] = 1;
      cost_table['\v'] = 1;
      cost_table['\b'] = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
	return 0;

      for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
	if (cost_table[i] < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}
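/* For instance (illustrative only): a switch whose labels are 'a', 'b'
   and '0' through '9' lies entirely within -1..127 with no odd control
   characters, so the cost table is used; a switch containing the label
   200 or '\001' fails the checks above, and the case labels are then
   weighted equally instead.  */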
/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */
static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
	     /* Do they jump to the same place?  */
	     && next_real_insn (label_rtx (np->code_label)) == lb
	     /* Are their ranges consecutive?  */
	     && tree_int_cst_equal (np->low,
				    fold (build (PLUS_EXPR,
						 TREE_TYPE (node->high),
						 node->high,
						 integer_one_node)))
	     /* An overflow is not consecutive.  */
	     && tree_int_cst_lt (node->high,
				 fold (build (PLUS_EXPR,
					      TREE_TYPE (node->high),
					      node->high,
					      integer_one_node))))
	node->high = np->high;

      /* NP is the first node after NODE which can't be grouped with it.
	 Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */
static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += cost_table[TREE_INT_CST_LOW (np->high)];
	    }

	  if (use_cost_table)
	    cost += cost_table[TREE_INT_CST_LOW (np->low)];

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total
		 cost; here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
		  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total
		 cost, where ranges count as 2; here I gets half the total
		 cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
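/* Illustrative example (not from the source): given the ordered list
   1 -> 2 -> 3 -> 4 -> 5 with no cost table, the code above picks 3 as
   the pivot, leaving 1 -> 2 as the left part and 4 -> 5 as the right
   part; each part has only two nodes, so it remains a one-level chain
   with its `parent' fields filled in.  */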
/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */
static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}
/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */
static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
			       node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}
/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
	  && node_has_high_bound (node, index_type));
}
/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}
/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out of bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */
static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_function ();
  rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
  rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
  rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
  rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
  enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);

	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label
		= build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      emit_jump_if_reachable (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue a conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if it would avoid only one
	     right child; it costs too much space to save so little time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 LT, NULL_RTX, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_blt_pat) (default_label));
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->right->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->right->code_label), unsignedp);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
	 of the condition here, but they didn't work
	 and I don't understand what the idea was.  -- rms.  */
	  /* If our "most probable entry" is less probable
	     than the default label, emit a jump to
	     the default label using condition codes
	     already lying around.  With no right branch,
	     a branch-greater-than will get us to the default
	     label correctly.  */
	  if (use_cost_table
	      && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
	    ;
#endif /* 0 */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 GT, NULL_RTX, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_bgt_pat) (default_label));
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers greater than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->left->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 GT, NULL_RTX, mode, unsignedp, 0);

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			 GE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump_if_reachable (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 LE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			 GE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */

	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}
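/* Worked example (illustrative only): for the balanced tree

		2
	       / \
	      1   3

   over a full-range int index, with neither subtree bounded, the code
   above emits roughly:

	if (index == 2) goto L2;
	if (index > 2) goto T;
	if (index == 1) goto L1;
	goto default;
     T: if (index == 3) goto L3;

   after which the caller's emit_jump_if_reachable supplies the final
   jump to the default label.  */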
/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.  */

static tree *block_vector;
void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  /* The first block is for the function body, and does not have
     corresponding block notes.  Don't include it in the block vector.  */
  block = BLOCK_SUBBLOCKS (block);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());
}