/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92, 93, 94, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
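
/* Example (an illustrative sketch, not taken from any particular front
   end): to expand `if (cond) stmt;' a parser would typically do

	expand_start_cond (cond, 0);
	  . . . expand STMT here . . .
	expand_end_cond ();

   passing an exit flag of 0 because no exit statement can refer
   to an if.  */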
#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"

#include "bytecode.h"
#include "bc-typecd.h"
#include "bc-opcode.h"
#include "bc-optab.h"
#include "bc-emit.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;
/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif

extern void (*interim_eh_hook)	PROTO((tree));
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
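
/* For example (an illustrative sketch): the C statement

	switch (i) { case 1: ... case 4: ... case 5: ... }

   initially yields the chain 1 -> 4 -> 5 through the RIGHT fields.
   If no branch table is used, balancing may re-link the nodes so that
   the node for 4 is the root, with 1 in its LEFT field and 5 in its
   RIGHT field; emitting the tree in order then performs a binary
   search over the case values.  */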
struct case_node
{
  struct case_node	*left;	/* Left son in binary tree */
  struct case_node	*right;	/* Right son in binary tree; also node chain */
  struct case_node	*parent; /* Parent of node in binary tree */
  tree			low;	/* Lowest index value for this label */
  tree			high;	/* Highest index value for this label */
  tree			code_label; /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
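
/* For instance (sketch): a loop begun with an exit flag of 1 gets a
   CODE_LABEL in `exit_label', so a later C `break' (expanded via
   `expand_exit_loop') can find the loop on `loop_stack' and jump to
   that label; with an exit flag of 0 the loop is invisible to exit
   statements.  */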
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  Complemented by
	     bc_stack_level (see below) when generating bytecodes.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0 (was empty_cleanup_list),
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	  /* Bytecode specific: stack level to restore stack to on exit.  */
	  int bc_stack_level;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* For bytecodes, the case table is in-lined right in the code.
	     A label is needed for skipping over this block.  It is only
	     used when generating bytecodes.  */
	  rtx skip_label;
	  /* A list of case labels, kept in ascending order by value
	     as the list is built.
	     During expand_end_case, this list may be rearranged into a
	     nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
    } data;
};
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
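
/* For example (sketch): ending a loop pops the loop's entry and
   everything begun inside it that is still pending, with

	POPSTACK (loop_stack);

   which unwinds nesting_stack down through the loop's own entry and
   keeps every per-construct chain consistent along the way.  */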
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
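
/* For instance (an illustrative sketch):

	{
	  int v[n];	(variable size: this contour resets the stack)
	  ...
	  goto done;	(DONE not yet defined: a fixup is recorded)
	  ...
	}
	done: ;

   The jump must restore the stack level in effect at DONE, which is
   unknown when the goto is expanded; the fixup supplies that code once
   the contour is exited and the label has been seen.  */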
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;

  /* Bytecode specific members follow */

  /* The label that this jump is jumping to, or 0 for break, continue
     or return.  */
  struct bc_label *bc_target;

  /* The label we use for the fixup patch */
  struct bc_label *label;

  /* True (non-0) if fixup has been handled */
  int bc_handled:1;

  /* Like stack_level above, except refers to the interpreter stack */
  int bc_stack_level;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
static void expand_goto_internal	PROTO((tree, rtx, rtx));
static void bc_expand_goto_internal	PROTO((enum bytecode_opcode,
					       struct bc_label *, tree));
static int expand_fixup			PROTO((tree, rtx, rtx));
static void bc_expand_fixup		PROTO((enum bytecode_opcode,
					       struct bc_label *, int));
static void fixup_gotos			PROTO((struct nesting *, rtx, tree,
					       rtx, int));
static void bc_fixup_gotos		PROTO((struct nesting *, int, tree,
					       rtx, int));
static int warn_if_unused_value		PROTO((tree));
static void bc_expand_start_cond	PROTO((tree, int));
static void bc_expand_end_cond		PROTO((void));
static void bc_expand_start_else	PROTO((void));
static void bc_expand_end_loop		PROTO((void));
static void bc_expand_end_bindings	PROTO((tree, int, int));
static void bc_expand_decl		PROTO((tree, tree));
static void bc_expand_variable_local_init PROTO((tree));
static void bc_expand_decl_init		PROTO((tree));
static void expand_null_return_1	PROTO((rtx, int));
static void expand_value_return		PROTO((rtx));
static int tail_recursion_args		PROTO((tree, tree));
static void expand_cleanups		PROTO((tree, tree, int, int));
static void bc_expand_start_case	PROTO((struct nesting *, tree,
					       tree, char *));
static int bc_pushcase			PROTO((tree, tree));
static void bc_check_for_full_enumeration_handling PROTO((tree));
static void bc_expand_end_case		PROTO((tree));
static void do_jump_if_equal		PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs		PROTO((case_node_ptr));
static void group_case_nodes		PROTO((case_node_ptr));
static void balance_case_nodes		PROTO((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PROTO((case_node_ptr, tree));
static int node_has_high_bound		PROTO((case_node_ptr, tree));
static int node_is_bounded		PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable	PROTO((rtx));
static void emit_case_nodes		PROTO((rtx, case_node_ptr, rtx, tree));

int bc_expand_exit_loop_if_false ();
void bc_expand_start_cond ();
void bc_expand_end_cond ();
void bc_expand_start_else ();
void bc_expand_end_bindings ();
void bc_expand_start_case ();
void bc_check_for_full_enumeration_handling ();
void bc_expand_end_case ();
void bc_expand_decl ();

extern rtx bc_allocate_local ();
extern rtx bc_allocate_variable_array ();
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}
void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}
void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}
void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  if (!output_bytecode)
    {
      last_insn = get_last_insn ();
      if (!optimize
	  && (GET_CODE (last_insn) == CODE_LABEL
	      || prev_real_insn (last_insn) == 0))
	emit_insn (gen_nop ());
    }
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  if (output_bytecode)
    {
      bc_expand_expr (exp);
      bc_emit_instruction (jumpP);
    }
  else
    {
      rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
      emit_queue ();
      emit_indirect_jump (x);
    }
}
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  if (output_bytecode)
    {
      if (! DECL_RTL (label))
	DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
      if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
	error ("multiply defined label");
      return;
    }

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */
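
/* For example (sketch, using GNU C nested functions): in

	void f ()
	{
	  __label__ out;
	  void g () { goto out; }
	  ...
	out: ;
	}

   the goto inside g targets a label whose function context is f, so
   CONTEXT below differs from current_function_decl and the
   nonlocal-goto path is taken.  */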
void
expand_goto (label)
     tree label;
{
  tree context;

  if (output_bytecode)
    {
      expand_goto_internal (label, label_rtx (label), NULL_RTX);
      return;
    }

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with hard_frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx, label_ref);
	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a CODE_LABEL.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */
static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  /* NOTICE!  If a bytecode instruction other than `jump' is needed,
     then the caller has to call bc_expand_goto_internal()
     directly.  This is rather an exceptional case, and there aren't
     that many places where this is necessary.  */
  if (output_bytecode)
    {
      bc_expand_goto_internal (jump, BYTECODE_BC_LABEL (label), body);
      return;
    }

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this would clobber
	     the stack pointer.  This one should be deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate a jump with OPCODE to the given bytecode LABEL which is
   found within BODY.  */

static void
bc_expand_goto_internal (opcode, label, body)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     tree body;
{
  struct nesting *block;
  int stack_level = -1;

  /* If the label is defined, adjust the stack as necessary.
     If it's not defined, we have to push the reference on the
     fixup list.  */

  if (label->defined)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing bytecode uids.)  Then restore the
	 outermost stack level within that block.  */

      for (block = block_stack; block; block = block->next)
	{
	  if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)
	    break;
	  if (block->data.block.bc_stack_level)
	    stack_level = block->data.block.bc_stack_level;

	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      /* Restore the stack level.  If we need to adjust the stack, we
	 must do so after the jump, since the jump may depend on
	 what's on the stack.  Thus, any stack-modifying conditional
	 jumps (these are the only ones that rely on what's on the
	 stack) go into the fixup list.  */

      if (stack_level >= 0
	  && stack_depth != stack_level
	  && opcode != jump)

	bc_expand_fixup (opcode, label, stack_level);
      else
	{
	  if (stack_level >= 0)
	    bc_adjust_stack (stack_depth - stack_level);

	  if (body && DECL_BIT_FIELD (body))
	    error ("jump to `%s' invalidly jumps into binding contour",
		   IDENTIFIER_POINTER (DECL_NAME (body)));

	  /* Emit immediate jump */
	  bc_emit_bytecode (opcode);
	  bc_emit_bytecode_labelref (label);

#ifdef DEBUG_PRINT_CODE
	  fputc ('\n', stderr);
#endif
	}
    }
  else
    /* Put goto in the fixup list */
    bc_expand_fixup (opcode, label, stack_level);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */
static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	start_sequence ();
	pushlevel (0);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
	end_sequence ();
	emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
#if 0
	    && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
   Make the fixup restore the stack level to STACK_LEVEL.  */

static void
bc_expand_fixup (opcode, label, stack_level)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     int stack_level;
{
  struct goto_fixup *fixup
    = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));

  fixup->label = bc_get_bytecode_label ();
  fixup->bc_target = label;
  fixup->bc_stack_level = stack_level;
  fixup->bc_handled = FALSE;

  fixup->next = goto_fixup_chain;
  goto_fixup_chain = fixup;

  /* Insert a jump to the fixup code */
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (fixup->label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  if (output_bytecode)
    {
      /* ??? The second arg is the bc stack level, which is not the same
	 as STACK_LEVEL.  I have no idea what should go here, so I'll
	 just pass 0.  */
      bc_fixup_gotos (thisblock, 0, cleanup_list, first_insn, dont_jump_in);
      return;
    }

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_REGISTER (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_REGISTER (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.
	   Also mark the cleanup_list_list element for F
	   that corresponds to this block, so that ultimately
	   this block's cleanups will be executed by the code above.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	   it means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    TREE_ADDRESSABLE (lists) = 1;

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   Note: STACK_DEPTH is not altered.

   The arguments are currently not used in the bytecode compiler, but we may
   need them one day for languages other than C.

   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
static void
bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     int stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;
  int saved_stack_depth;

  /* F is the fixup we are considering; PREV is the previous one.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev)
	    prev->next = f->next;
	}

      /* Emit code to restore the stack and continue */
      bc_emit_bytecode_labeldef (f->label);

      /* Save stack_depth across call, since bc_adjust_stack () will alter
	 the perceived stack depth via the instructions generated.  */

      if (f->bc_stack_level >= 0)
	{
	  saved_stack_depth = stack_depth;
	  bc_adjust_stack (stack_depth - f->bc_stack_level);
	  stack_depth = saved_stack_depth;
	}

      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (f->bc_target);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
    }

  goto_fixup_chain = NULL;
}
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */
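
/* E.g. (illustrative): the GNU C statement `asm ("nop");' reaches here
   with BODY the STRING_CST "nop" (possibly wrapped in an ADDR_EXPR) and
   is emitted as a bare ASM_INPUT rtx.  */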
void
expand_asm (body)
     tree body;
{
  if (output_bytecode)
    {
      error ("`asm' is invalid when generating bytecode");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
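
/* For example (illustrative): for the GNU C statement

	asm volatile ("foo %1,%0" : "=r" (x) : "g" (y) : "cc");

   STRING is "foo %1,%0"; OUTPUTS is a one-element list whose
   TREE_PURPOSE is the constraint "=r" and whose TREE_VALUE is `x';
   INPUTS likewise pairs "g" with `y'; CLOBBERS lists "cc"; and VOL
   is nonzero because of `volatile'.  */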
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  if (output_bytecode)
    {
      error ("`asm' is invalid when generating bytecode");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree val1;
      int j;
      int found_equal;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  */

      found_equal = 0;
      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	{
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    {
	      error ("output operand constraint contains `+'");
	      return;
	    }
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
	    found_equal = 1;
	}
      if (! found_equal)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a variable or indirect ref,
	 or a part of one,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
	     || TREE_CODE (val) == ARRAY_REF)
	val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	{
	  TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));
	  /* If it's a constant, print error now so don't crash later.  */
	  if (TREE_CODE (TREE_VALUE (tail)) != SAVE_EXPR)
	    {
	      error ("invalid output in `asm'");
	      return;
	    }
	}

      output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)));
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;
	  }

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      if (CONSTANT_P (XVECEXP (body, 3, i))
	  && ! general_operand (XVECEXP (body, 3, i),
				TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)))))
	XVECEXP (body, 3, i)
	  = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		       XVECEXP (body, 3, i));
      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx (CLOBBER, VOIDmode,
			       gen_rtx (MEM, BLKmode,
					gen_rtx (SCRATCH, VOIDmode, 0)));
		  continue;
		}

	      /* Ignore unknown register, error already signalled.  */
	      continue;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  if (output_bytecode)
    {
      int org_stack_depth = stack_depth;

      bc_expand_expr (exp);

      /* Restore stack depth */
      if (stack_depth < org_stack_depth)
	abort ();

      bc_emit_instruction (drop);

      last_expr_type = TREE_TYPE (exp);
      return;
    }

  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }

  /* If EXP is of function type and we are expanding statements for
     value, convert it to pointer-to-function.  */
  if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
    exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);

  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp,
				   (expr_stmts_for_value
				    ? NULL_RTX : const0_rtx),
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
	;
      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      NULL_RTX, VOIDmode, 0),
			 BLKmode, 0,
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */
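
/* For instance (sketch): for the statement `foo (), x + 1;' the value
   of the COMPOUND_EXPR's second operand is computed and dropped, so we
   warn; `foo (), 0' is exempted below so that such idioms stay quiet,
   as are casts to void and expressions with side effects like `x++'.  */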
1683 static int
1684 warn_if_unused_value (exp)
1685 tree exp;
1687 if (TREE_USED (exp))
1688 return 0;
1690 switch (TREE_CODE (exp))
1692 case PREINCREMENT_EXPR:
1693 case POSTINCREMENT_EXPR:
1694 case PREDECREMENT_EXPR:
1695 case POSTDECREMENT_EXPR:
1696 case MODIFY_EXPR:
1697 case INIT_EXPR:
1698 case TARGET_EXPR:
1699 case CALL_EXPR:
1700 case METHOD_CALL_EXPR:
1701 case RTL_EXPR:
1702 case WITH_CLEANUP_EXPR:
1703 case EXIT_EXPR:
1704 /* We don't warn about COND_EXPR because it may be a useful
1705 construct if either arm contains a side effect. */
1706 case COND_EXPR:
1707 return 0;
1709 case BIND_EXPR:
1710 /* For a binding, warn if no side effect within it. */
1711 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1713 case TRUTH_ORIF_EXPR:
1714 case TRUTH_ANDIF_EXPR:
1715 /* In && or ||, warn if 2nd operand has no side effect. */
1716 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1718 case COMPOUND_EXPR:
1719 if (TREE_NO_UNUSED_WARNING (exp))
1720 return 0;
1721 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1722 return 1;
1723 /* Let people do `(foo (), 0)' without a warning. */
1724 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1725 return 0;
1726 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1728 case NOP_EXPR:
1729 case CONVERT_EXPR:
1730 case NON_LVALUE_EXPR:
1731 /* Don't warn about values cast to void. */
1732 if (TREE_TYPE (exp) == void_type_node)
1733 return 0;
1734 /* Don't warn about conversions not explicit in the user's program. */
1735 if (TREE_NO_UNUSED_WARNING (exp))
1736 return 0;
1737 /* Assignment to a cast usually results in a cast of a modify.
1738 Don't complain about that. There can be an arbitrary number of
1739 casts before the modify, so we must loop until we find the first
1740 non-cast expression and then test to see if that is a modify. */
1742 tree tem = TREE_OPERAND (exp, 0);
1744 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1745 tem = TREE_OPERAND (tem, 0);
1747 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR)
1748 return 0;
1750 goto warn;
1752 case INDIRECT_REF:
1753 /* Don't warn about automatic dereferencing of references, since
1754 the user cannot control it. */
1755 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1756 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1757 /* ... fall through ... */
1759 default:
1760 /* Referencing a volatile value is a side effect, so don't warn. */
1761 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1762 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1763 && TREE_THIS_VOLATILE (exp))
1764 return 0;
1765 warn:
1766 warning_with_file_and_line (emit_filename, emit_lineno,
1767 "value computed is not used");
1768 return 1;
1772 /* Clear out the memory of the last expression evaluated. */
1774 void
1775 clear_last_expr ()
1777 last_expr_type = 0;
1780 /* Begin a statement which will return a value.
1781 Return the RTL_EXPR for this statement expr.
1782 The caller must save that value and pass it to expand_end_stmt_expr. */
1784 tree
1785 expand_start_stmt_expr ()
1787 int momentary;
1788 tree t;
1790 /* When generating bytecode just note down the stack depth */
1791 if (output_bytecode)
1792 return (build_int_2 (stack_depth, 0));
1794 /* Make the RTL_EXPR node temporary, not momentary,
1795 so that rtl_expr_chain doesn't become garbage. */
1796 momentary = suspend_momentary ();
1797 t = make_node (RTL_EXPR);
1798 resume_momentary (momentary);
1799 start_sequence_for_rtl_expr (t);
1800 NO_DEFER_POP;
1801 expr_stmts_for_value++;
1802 return t;
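/* Sketch of typical use (the example is hypothetical): for the GNU C
   statement expression

       z = ({ int y = foo (); y + 1; });

   the front end calls expand_start_stmt_expr before expanding the
   body, then hands the RTL_EXPR returned here to expand_end_stmt_expr,
   which supplies the value of `y + 1' as the value of the whole
   construct.  */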
1805 /* Restore the previous state at the end of a statement that returns a value.
1806 Returns a tree node representing the statement's value and the
1807 insns to compute the value.
1809 The nodes of that expression have been freed by now, so we cannot use them.
1810 But we don't want to do that anyway; the expression has already been
1811 evaluated and now we just want to use the value. So generate an RTL_EXPR
1812 with the proper type and RTL value.
1814 If the last substatement was not an expression,
1815 return something with type `void'. */
1817 tree
1818 expand_end_stmt_expr (t)
1819 tree t;
1821 if (output_bytecode)
1823 int i;
1824 tree t;
1827 /* At this point, all expressions have been evaluated in order.
1828 However, all expression values have been popped when evaluated,
1829 which means we have to recover the last expression value. This is
1830 the last value removed by means of a `drop' instruction. Instead
1831 of adding code to inhibit dropping the last expression value, it
1832 is here recovered by undoing the `drop'. Since `drop' is
1833 equivalent to `adjstackSI [1]', it can be undone with `adjstackSI
1834 [-1]'. */
1836 bc_adjust_stack (-1);
1838 if (!last_expr_type)
1839 last_expr_type = void_type_node;
1841 t = make_node (RTL_EXPR);
1842 TREE_TYPE (t) = last_expr_type;
1843 RTL_EXPR_RTL (t) = NULL;
1844 RTL_EXPR_SEQUENCE (t) = NULL;
1846 /* Don't consider deleting this expr or containing exprs at tree level. */
1847 TREE_THIS_VOLATILE (t) = 1;
1849 last_expr_type = 0;
1850 return t;
1853 OK_DEFER_POP;
1855 if (last_expr_type == 0)
1857 last_expr_type = void_type_node;
1858 last_expr_value = const0_rtx;
1860 else if (last_expr_value == 0)
1861 /* There are some cases where this can happen, such as when the
1862 statement has void type. */
1863 last_expr_value = const0_rtx;
1864 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1865 /* Remove any possible QUEUED. */
1866 last_expr_value = protect_from_queue (last_expr_value, 0);
1868 emit_queue ();
1870 TREE_TYPE (t) = last_expr_type;
1871 RTL_EXPR_RTL (t) = last_expr_value;
1872 RTL_EXPR_SEQUENCE (t) = get_insns ();
1874 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1876 end_sequence ();
1878 /* Don't consider deleting this expr or containing exprs at tree level. */
1879 TREE_SIDE_EFFECTS (t) = 1;
1880 /* Propagate volatility of the actual RTL expr. */
1881 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1883 last_expr_type = 0;
1884 expr_stmts_for_value--;
1886 return t;
1889 /* Generate RTL for the start of an if-then. COND is the expression
1890 whose truth should be tested.
1892 If EXITFLAG is nonzero, this conditional is visible to
1893 `exit_something'. */
1895 void
1896 expand_start_cond (cond, exitflag)
1897 tree cond;
1898 int exitflag;
1900 struct nesting *thiscond = ALLOC_NESTING ();
1902 /* Make an entry on cond_stack for the cond we are entering. */
1904 thiscond->next = cond_stack;
1905 thiscond->all = nesting_stack;
1906 thiscond->depth = ++nesting_depth;
1907 thiscond->data.cond.next_label = gen_label_rtx ();
1908 /* Before we encounter an `else', we don't need a separate exit label
1909 unless there are supposed to be exit statements
1910 to exit this conditional. */
1911 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1912 thiscond->data.cond.endif_label = thiscond->exit_label;
1913 cond_stack = thiscond;
1914 nesting_stack = thiscond;
1916 if (output_bytecode)
1917 bc_expand_start_cond (cond, exitflag);
1918 else
1919 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1922 /* Generate RTL between the then-clause and the elseif-clause
1923 of an if-then-elseif-.... */
1925 void
1926 expand_start_elseif (cond)
1927 tree cond;
1929 if (cond_stack->data.cond.endif_label == 0)
1930 cond_stack->data.cond.endif_label = gen_label_rtx ();
1931 emit_jump (cond_stack->data.cond.endif_label);
1932 emit_label (cond_stack->data.cond.next_label);
1933 cond_stack->data.cond.next_label = gen_label_rtx ();
1934 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1937 /* Generate RTL between the then-clause and the else-clause
1938 of an if-then-else. */
1940 void
1941 expand_start_else ()
1943 if (cond_stack->data.cond.endif_label == 0)
1944 cond_stack->data.cond.endif_label = gen_label_rtx ();
1946 if (output_bytecode)
1948 bc_expand_start_else ();
1949 return;
1952 emit_jump (cond_stack->data.cond.endif_label);
1953 emit_label (cond_stack->data.cond.next_label);
1954 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1957 /* After calling expand_start_else, turn this "else" into an "else if"
1958 by providing another condition. */
1960 void
1961 expand_elseif (cond)
1962 tree cond;
1964 cond_stack->data.cond.next_label = gen_label_rtx ();
1965 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1968 /* Generate RTL for the end of an if-then.
1969 Pop the record for it off of cond_stack. */
1971 void
1972 expand_end_cond ()
1974 struct nesting *thiscond = cond_stack;
1976 if (output_bytecode)
1977 bc_expand_end_cond ();
1978 else
1980 do_pending_stack_adjust ();
1981 if (thiscond->data.cond.next_label)
1982 emit_label (thiscond->data.cond.next_label);
1983 if (thiscond->data.cond.endif_label)
1984 emit_label (thiscond->data.cond.endif_label);
1987 POPSTACK (cond_stack);
1988 last_expr_type = 0;
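/* As a sketch (added for exposition; details vary by front end),

       if (a) A else if (b) B else C

   would typically be expanded by calling

       expand_start_cond (a, 0);    ...A...
       expand_start_elseif (b);     ...B...
       expand_start_else ();        ...C...
       expand_end_cond ();

   next_label always marks the start of the next untried arm, and
   endif_label, once created, marks the end of the whole statement.  */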
1992 /* Generate code for the start of an if-then. COND is the expression
1993 whose truth is to be tested; if EXITFLAG is nonzero this conditional
1994 is to be visible to exit_something. It is assumed that the caller
1995 has pushed the previous context on the cond stack. */
1997 static void
1998 bc_expand_start_cond (cond, exitflag)
1999 tree cond;
2000 int exitflag;
2002 struct nesting *thiscond = cond_stack;
2004 thiscond->data.case_stmt.nominal_type = cond;
2005 if (! exitflag)
2006 thiscond->exit_label = gen_label_rtx ();
2007 bc_expand_expr (cond);
2008 bc_emit_bytecode (xjumpifnot);
2009 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2011 #ifdef DEBUG_PRINT_CODE
2012 fputc ('\n', stderr);
2013 #endif
2016 /* Generate the label for the end of an if with
2017 no else-clause. */
2019 static void
2020 bc_expand_end_cond ()
2022 struct nesting *thiscond = cond_stack;
2024 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
2027 /* Generate code for the start of the else-clause of
2028 an if-then-else. */
2030 static void
2031 bc_expand_start_else ()
2033 struct nesting *thiscond = cond_stack;
2035 thiscond->data.cond.endif_label = thiscond->exit_label;
2036 thiscond->exit_label = gen_label_rtx ();
2037 bc_emit_bytecode (jump);
2038 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2040 #ifdef DEBUG_PRINT_CODE
2041 fputc ('\n', stderr);
2042 #endif
2044 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
2047 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2048 loop should be exited by `exit_something'. This is a loop for which
2049 `expand_continue' will jump to the top of the loop.
2051 Make an entry on loop_stack to record the labels associated with
2052 this loop. */
2054 struct nesting *
2055 expand_start_loop (exit_flag)
2056 int exit_flag;
2058 register struct nesting *thisloop = ALLOC_NESTING ();
2060 /* Make an entry on loop_stack for the loop we are entering. */
2062 thisloop->next = loop_stack;
2063 thisloop->all = nesting_stack;
2064 thisloop->depth = ++nesting_depth;
2065 thisloop->data.loop.start_label = gen_label_rtx ();
2066 thisloop->data.loop.end_label = gen_label_rtx ();
2067 thisloop->data.loop.alt_end_label = 0;
2068 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2069 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2070 loop_stack = thisloop;
2071 nesting_stack = thisloop;
2073 if (output_bytecode)
2075 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2076 return thisloop;
2079 do_pending_stack_adjust ();
2080 emit_queue ();
2081 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2082 emit_label (thisloop->data.loop.start_label);
2084 return thisloop;
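/* Sketch of typical use (added; details vary by front end):

       while (cond) body   =>   expand_start_loop (1);
                                expand_exit_loop_if_false (0, cond);
                                ...body...
                                expand_end_loop ();

   A `for' statement instead uses expand_start_loop_continue_elsewhere
   (below) so its increment step can sit between the continue-point
   and the jump back to the top.  */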
2087 /* Like expand_start_loop but for a loop where the continuation point
2088 (for expand_continue_loop) will be specified explicitly. */
2090 struct nesting *
2091 expand_start_loop_continue_elsewhere (exit_flag)
2092 int exit_flag;
2094 struct nesting *thisloop = expand_start_loop (exit_flag);
2095 loop_stack->data.loop.continue_label = gen_label_rtx ();
2096 return thisloop;
2099 /* Specify the continuation point for a loop started with
2100 expand_start_loop_continue_elsewhere.
2101 Use this at the point in the code to which a continue statement
2102 should jump. */
2104 void
2105 expand_loop_continue_here ()
2107 if (output_bytecode)
2109 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
2110 return;
2112 do_pending_stack_adjust ();
2113 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2114 emit_label (loop_stack->data.loop.continue_label);
2117 /* End a loop. */
2119 static void
2120 bc_expand_end_loop ()
2122 struct nesting *thisloop = loop_stack;
2124 bc_emit_bytecode (jump);
2125 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2127 #ifdef DEBUG_PRINT_CODE
2128 fputc ('\n', stderr);
2129 #endif
2131 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
2132 POPSTACK (loop_stack);
2133 last_expr_type = 0;
2137 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2138 Pop the block off of loop_stack. */
2140 void
2141 expand_end_loop ()
2143 register rtx insn;
2144 register rtx start_label;
2145 rtx last_test_insn = 0;
2146 int num_insns = 0;
2148 if (output_bytecode)
2150 bc_expand_end_loop ();
2151 return;
2154 insn = get_last_insn ();
2155 start_label = loop_stack->data.loop.start_label;
2157 /* Mark the continue-point at the top of the loop if none elsewhere. */
2158 if (start_label == loop_stack->data.loop.continue_label)
2159 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2161 do_pending_stack_adjust ();
2163 /* If optimizing, perhaps reorder the loop. If the loop
2164 starts with a conditional exit, roll that to the end
2165 where it will optimize together with the jump back.
2167 We look for the last conditional branch to the exit that we encounter
2168 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2169 branch to the exit first, use it.
2171 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2172 because moving them is not valid. */
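/* For instance (illustration added): `while (x < n) body;' initially
   expands with the exit test at the top,

       start: if (x >= n) goto end;  body;  goto start;  end:

   and moving that test to the bottom gives

       goto newstart;
       start: body;
       newstart: if (x < n) goto start;
       end:

   one conditional jump per iteration instead of a conditional plus an
   unconditional one.  */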
2174 if (optimize
2175 &&
2176 ! (GET_CODE (insn) == JUMP_INSN
2177 && GET_CODE (PATTERN (insn)) == SET
2178 && SET_DEST (PATTERN (insn)) == pc_rtx
2179 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2181 /* Scan insns from the top of the loop looking for a qualified
2182 conditional exit. */
2183 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2184 insn = NEXT_INSN (insn))
2186 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2187 break;
2189 if (GET_CODE (insn) == NOTE
2190 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2191 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2192 break;
2194 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2195 num_insns++;
2197 if (last_test_insn && num_insns > 30)
2198 break;
2200 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2201 && SET_DEST (PATTERN (insn)) == pc_rtx
2202 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2203 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2204 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2205 == loop_stack->data.loop.end_label)
2206 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2207 == loop_stack->data.loop.alt_end_label)))
2208 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2209 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2210 == loop_stack->data.loop.end_label)
2211 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2212 == loop_stack->data.loop.alt_end_label)))))
2213 last_test_insn = insn;
2215 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2216 && GET_CODE (PATTERN (insn)) == SET
2217 && SET_DEST (PATTERN (insn)) == pc_rtx
2218 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2219 && ((XEXP (SET_SRC (PATTERN (insn)), 0)
2220 == loop_stack->data.loop.end_label)
2221 || (XEXP (SET_SRC (PATTERN (insn)), 0)
2222 == loop_stack->data.loop.alt_end_label)))
2223 /* Include BARRIER. */
2224 last_test_insn = NEXT_INSN (insn);
2227 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2229 /* We found one. Move everything from there up
2230 to the end of the loop, and add a jump into the loop
2231 to jump to there. */
2232 register rtx newstart_label = gen_label_rtx ();
2233 register rtx start_move = start_label;
2235 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2236 then we want to move this note also. */
2237 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2238 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2239 == NOTE_INSN_LOOP_CONT))
2240 start_move = PREV_INSN (start_move);
2242 emit_label_after (newstart_label, PREV_INSN (start_move));
2243 reorder_insns (start_move, last_test_insn, get_last_insn ());
2244 emit_jump_insn_after (gen_jump (start_label),
2245 PREV_INSN (newstart_label));
2246 emit_barrier_after (PREV_INSN (newstart_label));
2247 start_label = newstart_label;
2251 emit_jump (start_label);
2252 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2253 emit_label (loop_stack->data.loop.end_label);
2255 POPSTACK (loop_stack);
2257 last_expr_type = 0;
2260 /* Generate a jump to the current loop's continue-point.
2261 This is usually the top of the loop, but may be specified
2262 explicitly elsewhere. If not currently inside a loop,
2263 return 0 and do nothing; caller will print an error message. */
2265 int
2266 expand_continue_loop (whichloop)
2267 struct nesting *whichloop;
2269 last_expr_type = 0;
2270 if (whichloop == 0)
2271 whichloop = loop_stack;
2272 if (whichloop == 0)
2273 return 0;
2274 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2275 NULL_RTX);
2276 return 1;
2279 /* Generate a jump to exit the current loop. If not currently inside a loop,
2280 return 0 and do nothing; caller will print an error message. */
2282 int
2283 expand_exit_loop (whichloop)
2284 struct nesting *whichloop;
2286 last_expr_type = 0;
2287 if (whichloop == 0)
2288 whichloop = loop_stack;
2289 if (whichloop == 0)
2290 return 0;
2291 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2292 return 1;
2295 /* Generate a conditional jump to exit the current loop if COND
2296 evaluates to zero. If not currently inside a loop,
2297 return 0 and do nothing; caller will print an error message. */
2299 int
2300 expand_exit_loop_if_false (whichloop, cond)
2301 struct nesting *whichloop;
2302 tree cond;
2304 last_expr_type = 0;
2305 if (whichloop == 0)
2306 whichloop = loop_stack;
2307 if (whichloop == 0)
2308 return 0;
2309 if (output_bytecode)
2311 bc_expand_expr (cond);
2312 bc_expand_goto_internal (xjumpifnot,
2313 BYTECODE_BC_LABEL (whichloop->exit_label),
2314 NULL_TREE);
2316 else
2318 /* In order to handle fixups, we actually create a conditional jump
2319 around an unconditional branch to exit the loop. If fixups are
2320 necessary, they go before the unconditional branch. */
2322 rtx label = gen_label_rtx ();
2323 rtx last_insn;
2325 do_jump (cond, NULL_RTX, label);
2326 last_insn = get_last_insn ();
2327 if (GET_CODE (last_insn) == CODE_LABEL)
2328 whichloop->data.loop.alt_end_label = last_insn;
2329 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2330 NULL_RTX);
2331 emit_label (label);
2334 return 1;
2337 /* Return non-zero if we should preserve sub-expressions as separate
2338 pseudos. We never do so if we aren't optimizing. We always do so
2339 if -fexpensive-optimizations.
2341 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2342 the loop may still be a small one. */
2344 int
2345 preserve_subexpressions_p ()
2347 rtx insn;
2349 if (flag_expensive_optimizations)
2350 return 1;
2352 if (optimize == 0 || loop_stack == 0)
2353 return 0;
2355 insn = get_last_insn_anywhere ();
2357 return (insn
2358 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2359 < n_non_fixed_regs * 3));
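/* E.g. (note added), with 30 non-fixed registers the answer is yes
   only within roughly the first 90 insns emitted since the loop's
   start label -- a cheap proxy for "the loop body is still small",
   since insn UIDs only approximate insn counts.  */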
2363 /* Generate a jump to exit the current loop, conditional, binding contour
2364 or case statement. Not all such constructs are visible to this function,
2365 only those started with EXIT_FLAG nonzero. Individual languages use
2366 the EXIT_FLAG parameter to control which kinds of constructs you can
2367 exit this way.
2369 If not currently inside anything that can be exited,
2370 return 0 and do nothing; caller will print an error message. */
2372 int
2373 expand_exit_something ()
2375 struct nesting *n;
2376 last_expr_type = 0;
2377 for (n = nesting_stack; n; n = n->all)
2378 if (n->exit_label != 0)
2380 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2381 return 1;
2384 return 0;
2387 /* Generate RTL to return from the current function, with no value.
2388 (That is, we do not do anything about returning any value.) */
2390 void
2391 expand_null_return ()
2393 struct nesting *block = block_stack;
2394 rtx last_insn = 0;
2396 if (output_bytecode)
2398 bc_emit_instruction (ret);
2399 return;
2402 /* Does any pending block have cleanups? */
2404 while (block && block->data.block.cleanups == 0)
2405 block = block->next;
2407 /* If yes, use a goto to return, since that runs cleanups. */
2409 expand_null_return_1 (last_insn, block != 0);
2412 /* Generate RTL to return from the current function, with value VAL. */
2414 static void
2415 expand_value_return (val)
2416 rtx val;
2418 struct nesting *block = block_stack;
2419 rtx last_insn = get_last_insn ();
2420 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2422 /* Copy the value to the return location
2423 unless it's already there. */
2425 if (return_reg != val)
2427 #ifdef PROMOTE_FUNCTION_RETURN
2428 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2429 int unsignedp = TREE_UNSIGNED (type);
2430 enum machine_mode mode
2431 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2432 &unsignedp, 1);
2434 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2435 convert_move (return_reg, val, unsignedp);
2436 else
2437 #endif
2438 emit_move_insn (return_reg, val);
2440 if (GET_CODE (return_reg) == REG
2441 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2442 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2444 /* Does any pending block have cleanups? */
2446 while (block && block->data.block.cleanups == 0)
2447 block = block->next;
2449 /* If yes, use a goto to return, since that runs cleanups.
2450 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2452 expand_null_return_1 (last_insn, block != 0);
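/* E.g. (note added): under PROMOTE_FUNCTION_RETURN a function declared
   to return `char' may really return a sign- or zero-extended word;
   the convert_move call above widens VAL to the promoted mode before
   it reaches the return register.  */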
2455 /* Output a return with no value. If LAST_INSN is nonzero,
2456 pretend that the return takes place after LAST_INSN.
2457 If USE_GOTO is nonzero then don't use a return instruction;
2458 go to the return label instead. This causes any cleanups
2459 of pending blocks to be executed normally. */
2461 static void
2462 expand_null_return_1 (last_insn, use_goto)
2463 rtx last_insn;
2464 int use_goto;
2466 rtx end_label = cleanup_label ? cleanup_label : return_label;
2468 clear_pending_stack_adjust ();
2469 do_pending_stack_adjust ();
2470 last_expr_type = 0;
2472 /* PCC-struct return always uses an epilogue. */
2473 if (current_function_returns_pcc_struct || use_goto)
2475 if (end_label == 0)
2476 end_label = return_label = gen_label_rtx ();
2477 expand_goto_internal (NULL_TREE, end_label, last_insn);
2478 return;
2481 /* Otherwise output a simple return-insn if one is available,
2482 unless it won't do the job. */
2483 #ifdef HAVE_return
2484 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2486 emit_jump_insn (gen_return ());
2487 emit_barrier ();
2488 return;
2490 #endif
2492 /* Otherwise jump to the epilogue. */
2493 expand_goto_internal (NULL_TREE, end_label, last_insn);
2496 /* Generate RTL to evaluate the expression RETVAL and return it
2497 from the current function. */
2499 void
2500 expand_return (retval)
2501 tree retval;
2503 /* If there are any cleanups to be performed, then they will
2504 be inserted following LAST_INSN. It is desirable
2505 that the last_insn, for such purposes, should be the
2506 last insn before computing the return value. Otherwise, cleanups
2507 which call functions can clobber the return value. */
2508 /* ??? rms: I think that is erroneous, because in C++ it would
2509 run destructors on variables that might be used in the subsequent
2510 computation of the return value. */
2511 rtx last_insn = 0;
2512 register rtx val = 0;
2513 register rtx op0;
2514 tree retval_rhs;
2515 int cleanups;
2516 struct nesting *block;
2518 /* Bytecode returns are quite simple: just leave the result on the
2519 arithmetic stack. */
2520 if (output_bytecode)
2522 bc_expand_expr (retval);
2523 bc_emit_instruction (ret);
2524 return;
2527 /* If function wants no value, give it none. */
2528 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2530 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2531 emit_queue ();
2532 expand_null_return ();
2533 return;
2536 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2537 /* This is not sufficient. We also need to watch for cleanups of the
2538 expression we are about to expand. Unfortunately, we cannot know
2539 if it has cleanups until we expand it, and we want to change how we
2540 expand it depending upon if we need cleanups. We can't win. */
2541 #if 0
2542 cleanups = any_pending_cleanups (1);
2543 #else
2544 cleanups = 1;
2545 #endif
2547 if (TREE_CODE (retval) == RESULT_DECL)
2548 retval_rhs = retval;
2549 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2550 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2551 retval_rhs = TREE_OPERAND (retval, 1);
2552 else if (TREE_TYPE (retval) == void_type_node)
2553 /* Recognize tail-recursive call to void function. */
2554 retval_rhs = retval;
2555 else
2556 retval_rhs = NULL_TREE;
2558 /* Only use `last_insn' if there are cleanups which must be run. */
2559 if (cleanups || cleanup_label != 0)
2560 last_insn = get_last_insn ();
2562 /* Distribute return down conditional expr if either of the sides
2563 may involve tail recursion (see test below). This enhances the number
2564 of tail recursions we see. Don't do this always since it can produce
2565 sub-optimal code in some cases and we distribute assignments into
2566 conditional expressions when it would help. */
2568 if (optimize && retval_rhs != 0
2569 && frame_offset == 0
2570 && TREE_CODE (retval_rhs) == COND_EXPR
2571 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2572 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2574 rtx label = gen_label_rtx ();
2575 tree expr;
2577 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2578 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2579 DECL_RESULT (current_function_decl),
2580 TREE_OPERAND (retval_rhs, 1));
2581 TREE_SIDE_EFFECTS (expr) = 1;
2582 expand_return (expr);
2583 emit_label (label);
2585 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2586 DECL_RESULT (current_function_decl),
2587 TREE_OPERAND (retval_rhs, 2));
2588 TREE_SIDE_EFFECTS (expr) = 1;
2589 expand_return (expr);
2590 return;
2593 /* For tail-recursive call to current function,
2594 just jump back to the beginning.
2595 It's unsafe if any auto variable in this function
2596 has its address taken; for simplicity,
2597 require stack frame to be empty. */
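/* An illustrative candidate (example added):

       int fact (int n, int acc)
       { return n == 0 ? acc : fact (n - 1, n * acc); }

   The conditional return distributes, above, into two plain returns;
   the recursive one then becomes moves into `n' and `acc' followed by
   a jump back to tail_recursion_reentry, with no new stack frame.  */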
2598 if (optimize && retval_rhs != 0
2599 && frame_offset == 0
2600 && TREE_CODE (retval_rhs) == CALL_EXPR
2601 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2602 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2603 /* Finish checking validity, and if valid emit code
2604 to set the argument variables for the new call. */
2605 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2606 DECL_ARGUMENTS (current_function_decl)))
2608 if (tail_recursion_label == 0)
2610 tail_recursion_label = gen_label_rtx ();
2611 emit_label_after (tail_recursion_label,
2612 tail_recursion_reentry);
2614 emit_queue ();
2615 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2616 emit_barrier ();
2617 return;
2619 #ifdef HAVE_return
2620 /* This optimization is safe if there are local cleanups
2621 because expand_null_return takes care of them.
2622 ??? I think it should also be safe when there is a cleanup label,
2623 because expand_null_return takes care of them, too.
2624 Any reason why not? */
2625 if (HAVE_return && cleanup_label == 0
2626 && ! current_function_returns_pcc_struct
2627 && BRANCH_COST <= 1)
2629 /* If this is return x == y; then generate
2630 if (x == y) return 1; else return 0;
2631 if we can do it with explicit return insns and
2632 branches are cheap. */
2633 if (retval_rhs)
2634 switch (TREE_CODE (retval_rhs))
2636 case EQ_EXPR:
2637 case NE_EXPR:
2638 case GT_EXPR:
2639 case GE_EXPR:
2640 case LT_EXPR:
2641 case LE_EXPR:
2642 case TRUTH_ANDIF_EXPR:
2643 case TRUTH_ORIF_EXPR:
2644 case TRUTH_AND_EXPR:
2645 case TRUTH_OR_EXPR:
2646 case TRUTH_NOT_EXPR:
2647 case TRUTH_XOR_EXPR:
2648 op0 = gen_label_rtx ();
2649 jumpifnot (retval_rhs, op0);
2650 expand_value_return (const1_rtx);
2651 emit_label (op0);
2652 expand_value_return (const0_rtx);
2653 return;
2656 #endif /* HAVE_return */
2658 /* If the result is an aggregate that is being returned in one (or more)
2659 registers, load the registers here. The compiler currently can't handle
2660 copying a BLKmode value into registers. We could put this code in a
2661 more general area (for use by everyone instead of just function
2662 call/return), but until this feature is generally usable it is kept here
2663 (and in expand_call). */
2665 if (retval_rhs != 0
2666 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2667 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2669 int i;
2670 int big_endian_correction = 0;
2671 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2672 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2673 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2674 rtx result_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2675 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2676 enum machine_mode tmpmode;
2678 /* Structures smaller than a word are aligned to the least significant
2679 byte (to the right). On a BYTES_BIG_ENDIAN machine, this means we
2680 must skip the empty high order bytes when calculating the bit
2681 offset. */
2682 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
2683 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
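/* Worked example (added): a 3-byte structure on a 32-bit big-endian
   target gives big_endian_correction = 32 - 3*8 = 8, so the stores
   below start 8 bits in and the data lands in the low-order
   (rightmost) 24 bits of the word, as assumed above.  */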
2685 for (i = 0; i < n_regs; i++)
2687 rtx reg = gen_reg_rtx (word_mode);
2688 rtx word = operand_subword_force (result_val, i, BLKmode);
2689 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),BITS_PER_WORD);
2690 int bitpos;
2692 result_pseudos[i] = reg;
2694 /* Clobber REG and move each partword into it. Ensure we don't
2695 go past the end of the structure. Note that the loop below
2696 works because we've already verified that padding and
2697 endianness are compatible. */
2698 emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
2700 for (bitpos = 0;
2701 bitpos < BITS_PER_WORD && bytes > 0;
2702 bitpos += bitsize, bytes -= bitsize / BITS_PER_UNIT)
2704 int xbitpos = bitpos + big_endian_correction;
2706 store_bit_field (reg, bitsize, xbitpos, word_mode,
2707 extract_bit_field (word, bitsize, bitpos, 1,
2708 NULL_RTX, word_mode,
2709 word_mode,
2710 bitsize / BITS_PER_UNIT,
2711 BITS_PER_WORD),
2712 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2716 /* Now that the value is in pseudos, copy it to the result reg(s). */
2717 emit_queue ();
2718 free_temp_slots ();
2719 for (i = 0; i < n_regs; i++)
2720 emit_move_insn (gen_rtx (REG, word_mode, REGNO (result_reg) + i),
2721 result_pseudos[i]);
2723 /* Find the smallest integer mode large enough to hold the
2724 entire structure and use that mode instead of BLKmode
2725 on the USE insn for the return register. */
2726 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2727 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2728 tmpmode != MAX_MACHINE_MODE;
2729 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2731 /* Have we found a large enough mode? */
2732 if (GET_MODE_SIZE (tmpmode) >= bytes)
2733 break;
2736 /* No suitable mode found. */
2737 if (tmpmode == MAX_MACHINE_MODE)
2738 abort ();
2740 PUT_MODE (result_reg, tmpmode);
2742 expand_value_return (result_reg);
2744 else if (cleanups
2745 && retval_rhs != 0
2746 && TREE_TYPE (retval_rhs) != void_type_node
2747 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2749 /* Calculate the return value into a pseudo reg. */
2750 val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2751 emit_queue ();
2752 /* All temporaries have now been used. */
2753 free_temp_slots ();
2754 /* Return the calculated value, doing cleanups first. */
2755 expand_value_return (val);
2757 else
2759 /* No cleanups or no hard reg used;
2760 calculate value into hard return reg. */
2761 expand_expr (retval, const0_rtx, VOIDmode, 0);
2762 emit_queue ();
2763 free_temp_slots ();
2764 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2768 /* Return 1 if the end of the generated RTX is not a barrier.
2769 This means code already compiled can drop through. */
2771 int
2772 drop_through_at_end_p ()
2774 rtx insn = get_last_insn ();
2775 while (insn && GET_CODE (insn) == NOTE)
2776 insn = PREV_INSN (insn);
2777 return insn && GET_CODE (insn) != BARRIER;
2780 /* Emit code to alter this function's formal parms for a tail-recursive call.
2781 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2782 FORMALS is the chain of decls of formals.
2783 Return 1 if this can be done;
2784 otherwise return 0 and do not emit any code. */
2786 static int
2787 tail_recursion_args (actuals, formals)
2788 tree actuals, formals;
2790 register tree a = actuals, f = formals;
2791 register int i;
2792 register rtx *argvec;
2794 /* Check that number and types of actuals are compatible
2795 with the formals. This is not always true in valid C code.
2796 Also check that no formal needs to be addressable
2797 and that all formals are scalars. */
2799 /* Also count the args. */
2801 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2803 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2804 return 0;
2805 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2806 return 0;
2808 if (a != 0 || f != 0)
2809 return 0;
2811 /* Compute all the actuals. */
2813 argvec = (rtx *) alloca (i * sizeof (rtx));
2815 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2816 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2818 /* Find which actual values refer to current values of previous formals.
2819 Copy each of them now, before any formal is changed. */
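/* E.g. (illustration added): in

       int f (int a, int b) { return f (b, a); }

   the actual for the new `b' is the old `a'; since the formal `a' is
   stored first, its value must be copied to a fresh register before
   the stores begin, which is what the loop below detects.  */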
2821 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2823 int copy = 0;
2824 register int j;
2825 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2826 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2827 { copy = 1; break; }
2828 if (copy)
2829 argvec[i] = copy_to_reg (argvec[i]);
2832 /* Store the values of the actuals into the formals. */
2834 for (f = formals, a = actuals, i = 0; f;
2835 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2837 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2838 emit_move_insn (DECL_RTL (f), argvec[i]);
2839 else
2840 convert_move (DECL_RTL (f), argvec[i],
2841 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2844 free_temp_slots ();
2845 return 1;
2848 /* Generate the RTL code for entering a binding contour.
2849 The variables are declared one by one, by calls to `expand_decl'.
2851 EXIT_FLAG is nonzero if this construct should be visible to
2852 `exit_something'. */
2854 void
2855 expand_start_bindings (exit_flag)
2856 int exit_flag;
2858 struct nesting *thisblock = ALLOC_NESTING ();
2859 rtx note = output_bytecode ? 0 : emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2861 /* Make an entry on block_stack for the block we are entering. */
2863 thisblock->next = block_stack;
2864 thisblock->all = nesting_stack;
2865 thisblock->depth = ++nesting_depth;
2866 thisblock->data.block.stack_level = 0;
2867 thisblock->data.block.cleanups = 0;
2868 thisblock->data.block.function_call_count = 0;
2869 #if 0
2870 if (block_stack)
2872 if (block_stack->data.block.cleanups == NULL_TREE
2873 && (block_stack->data.block.outer_cleanups == NULL_TREE
2874 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2875 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2876 else
2877 thisblock->data.block.outer_cleanups
2878 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2879 block_stack->data.block.outer_cleanups);
2881 else
2882 thisblock->data.block.outer_cleanups = 0;
2883 #endif
2884 #if 1
2885 if (block_stack
2886 && !(block_stack->data.block.cleanups == NULL_TREE
2887 && block_stack->data.block.outer_cleanups == NULL_TREE))
2888 thisblock->data.block.outer_cleanups
2889 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2890 block_stack->data.block.outer_cleanups);
2891 else
2892 thisblock->data.block.outer_cleanups = 0;
2893 #endif
2894 thisblock->data.block.label_chain = 0;
2895 thisblock->data.block.innermost_stack_block = stack_block_stack;
2896 thisblock->data.block.first_insn = note;
2897 thisblock->data.block.block_start_count = ++block_start_count;
2898 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2899 block_stack = thisblock;
2900 nesting_stack = thisblock;
2902 if (!output_bytecode)
2904 /* Make a new level for allocating stack slots. */
2905 push_temp_slots ();
2909 /* Given a pointer to a BLOCK node, save a pointer to the most recently
2910 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
2911 BLOCK node. */
2913 void
2914 remember_end_note (block)
2915 register tree block;
2917 BLOCK_END_NOTE (block) = last_block_end_note;
2918 last_block_end_note = NULL_RTX;
2921 /* Generate RTL code to terminate a binding contour.
2922 VARS is the chain of VAR_DECL nodes
2923 for the variables bound in this contour.
2924 MARK_ENDS is nonzero if we should put a note at the beginning
2925 and end of this binding contour.
2927 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2928 (That is true automatically if the contour has a saved stack level.) */
2930 void
2931 expand_end_bindings (vars, mark_ends, dont_jump_in)
2932 tree vars;
2933 int mark_ends;
2934 int dont_jump_in;
2936 register struct nesting *thisblock = block_stack;
2937 register tree decl;
2939 if (output_bytecode)
2941 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
2942 return;
2945 if (warn_unused)
2946 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2947 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
2948 && ! DECL_IN_SYSTEM_HEADER (decl))
2949 warning_with_decl (decl, "unused variable `%s'");
2951 if (thisblock->exit_label)
2953 do_pending_stack_adjust ();
2954 emit_label (thisblock->exit_label);
2957 /* If necessary, make a handler for nonlocal gotos taking
2958 place in the function calls in this block. */
2959 if (function_call_count != thisblock->data.block.function_call_count
2960 && nonlocal_labels
2961 /* Make handler for outermost block
2962 if there were any nonlocal gotos to this function. */
2963 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2964 /* Make handler for inner block if it has something
2965 special to do when you jump out of it. */
2966 : (thisblock->data.block.cleanups != 0
2967 || thisblock->data.block.stack_level != 0)))
2969 tree link;
2970 rtx afterward = gen_label_rtx ();
2971 rtx handler_label = gen_label_rtx ();
2972 rtx save_receiver = gen_reg_rtx (Pmode);
2973 rtx insns;
2975 /* Don't let jump_optimize delete the handler. */
2976 LABEL_PRESERVE_P (handler_label) = 1;
2978 /* Record the handler address in the stack slot for that purpose,
2979 during this block, saving and restoring the outer value. */
2980 if (thisblock->next != 0)
2982 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2984 start_sequence ();
2985 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
2986 insns = get_insns ();
2987 end_sequence ();
2988 emit_insns_before (insns, thisblock->data.block.first_insn);
2991 start_sequence ();
2992 emit_move_insn (nonlocal_goto_handler_slot,
2993 gen_rtx (LABEL_REF, Pmode, handler_label));
2994 insns = get_insns ();
2995 end_sequence ();
2996 emit_insns_before (insns, thisblock->data.block.first_insn);
2998 /* Jump around the handler; it runs only when specially invoked. */
2999 emit_jump (afterward);
3000 emit_label (handler_label);
3002 #ifdef HAVE_nonlocal_goto
3003 if (! HAVE_nonlocal_goto)
3004 #endif
3005 /* First adjust our frame pointer to its actual value. It was
3006 previously set to the start of the virtual area corresponding to
3007 the stacked variables when we branched here and now needs to be
3008 adjusted to the actual hardware fp value.
3010 Assignments to virtual registers are converted by
3011 instantiate_virtual_regs into the corresponding assignment
3012 to the underlying register (fp in this case) that makes
3013 the original assignment true.
3014 So the following insn will actually be
3015 decrementing fp by STARTING_FRAME_OFFSET. */
3016 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3018 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3019 if (fixed_regs[ARG_POINTER_REGNUM])
3021 #ifdef ELIMINABLE_REGS
3022 /* If the argument pointer can be eliminated in favor of the
3023 frame pointer, we don't need to restore it. We assume here
3024 that if such an elimination is present, it can always be used.
3025 This is the case on all known machines; if we don't make this
3026 assumption, we do unnecessary saving on many machines. */
3027 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3028 int i;
3030 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3031 if (elim_regs[i].from == ARG_POINTER_REGNUM
3032 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3033 break;
3035 if (i == sizeof elim_regs / sizeof elim_regs [0])
3036 #endif
3038 /* Now restore our arg pointer from the address at which it
3039 was saved in our stack frame.
3040 If there hasn't been space allocated for it yet, make
3041 some now. */
3042 if (arg_pointer_save_area == 0)
3043 arg_pointer_save_area
3044 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3045 emit_move_insn (virtual_incoming_args_rtx,
3046 /* We need a pseudo here, or else
3047 instantiate_virtual_regs_1 complains. */
3048 copy_to_reg (arg_pointer_save_area));
3051 #endif
3053 /* The handler expects the desired label address in the static chain
3054 register. It tests the address and does an appropriate jump
3055 to whatever label is desired. */
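/* That is (sketch added; pseudo-code only), the code emitted below
   behaves like

       if (static_chain == &lab1) goto lab1;
       else if (static_chain == &lab2) goto lab2;
       else abort ();

   with one test per reachable nonlocal label.  */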
3056 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
3057 /* Skip any labels we shouldn't be able to jump to from here. */
3058 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3060 rtx not_this = gen_label_rtx ();
3061 rtx this = gen_label_rtx ();
3062 do_jump_if_equal (static_chain_rtx,
3063 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
3064 this, 0);
3065 emit_jump (not_this);
3066 emit_label (this);
3067 expand_goto (TREE_VALUE (link));
3068 emit_label (not_this);
3070 /* If label is not recognized, abort. */
3071 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
3072 VOIDmode, 0);
3073 emit_label (afterward);
3076 /* Don't allow jumping into a block that has cleanups or a stack level. */
3077 if (dont_jump_in
3078 || thisblock->data.block.stack_level != 0
3079 || thisblock->data.block.cleanups != 0)
3081 struct label_chain *chain;
3083 /* Any labels in this block are no longer valid to go to.
3084 Mark them to cause an error message. */
3085 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3087 DECL_TOO_LATE (chain->label) = 1;
3088 /* If any goto without a fixup came to this label,
3089 that must be an error, because gotos without fixups
3090 come from outside all saved stack-levels and all cleanups. */
3091 if (TREE_ADDRESSABLE (chain->label))
3092 error_with_decl (chain->label,
3093 "label `%s' used before containing binding contour");
3097 /* Restore stack level in effect before the block
3098 (only if variable-size objects allocated). */
3099 /* Perform any cleanups associated with the block. */
3101 if (thisblock->data.block.stack_level != 0
3102 || thisblock->data.block.cleanups != 0)
3104 /* Only clean up here if this point can actually be reached. */
3105 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3107 /* Don't let cleanups affect ({...}) constructs. */
3108 int old_expr_stmts_for_value = expr_stmts_for_value;
3109 rtx old_last_expr_value = last_expr_value;
3110 tree old_last_expr_type = last_expr_type;
3111 expr_stmts_for_value = 0;
3113 /* Do the cleanups. */
3114 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3115 if (reachable)
3116 do_pending_stack_adjust ();
3118 expr_stmts_for_value = old_expr_stmts_for_value;
3119 last_expr_value = old_last_expr_value;
3120 last_expr_type = old_last_expr_type;
3122 /* Restore the stack level. */
3124 if (reachable && thisblock->data.block.stack_level != 0)
3126 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3127 thisblock->data.block.stack_level, NULL_RTX);
3128 if (nonlocal_goto_handler_slot != 0)
3129 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3130 NULL_RTX);
3133 /* Any gotos out of this block must also do these things.
3134 Also report any gotos with fixups that came to labels in this
3135 level. */
3136 fixup_gotos (thisblock,
3137 thisblock->data.block.stack_level,
3138 thisblock->data.block.cleanups,
3139 thisblock->data.block.first_insn,
3140 dont_jump_in);
3143 /* Mark the beginning and end of the scope if requested.
3144 We do this now, after running cleanups on the variables
3145 just going out of scope, so they are in scope for their cleanups. */
3147 if (mark_ends)
3148 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3149 else
3150 /* Get rid of the beginning-mark if we don't make an end-mark. */
3151 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3153 /* If doing stupid register allocation, make sure lives of all
3154 register variables declared here extend thru end of scope. */
3156 if (obey_regdecls)
3157 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3159 rtx rtl = DECL_RTL (decl);
3160 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3161 use_variable (rtl);
3164 /* Restore block_stack level for containing block. */
3166 stack_block_stack = thisblock->data.block.innermost_stack_block;
3167 POPSTACK (block_stack);
3169 /* Pop the stack slot nesting and free any slots at this level. */
3170 pop_temp_slots ();
3174 /* End a binding contour.
3175 VARS is the chain of VAR_DECL nodes for the variables bound
3176 in this contour. MARK_ENDS is nonzero if we should put a note
3177 at the beginning and end of this binding contour.
3178 DONT_JUMP_IN is nonzero if it is not valid to jump into this
3179 contour. */
3181 static void
3182 bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3183 tree vars;
3184 int mark_ends;
3185 int dont_jump_in;
3187 struct nesting *thisbind = nesting_stack;
3188 tree decl;
3190 if (warn_unused)
3191 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3192 if (! TREE_USED (TREE_VALUE (decl)) && TREE_CODE (TREE_VALUE (decl)) == VAR_DECL)
3193 warning_with_decl (decl, "unused variable `%s'");
3195 if (thisbind->exit_label)
3196 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));
3198 /* Pop block/bindings off stack */
3199 POPSTACK (block_stack);
3202 /* Generate RTL for the automatic variable declaration DECL.
3203 (Other kinds of declarations are simply ignored if seen here.)
3204 Cleanups for this variable, if any, are no longer handled here;
3205 record them separately with `expand_decl_cleanup', whose comment
3206 below describes the restrictions on CLEANUP expressions.
3217 There is no special support here for C++ constructors.
3218 They should be handled by the proper code in DECL_INITIAL. */
3220 void
3221 expand_decl (decl)
3222 register tree decl;
3224 struct nesting *thisblock = block_stack;
3225 tree type;
3227 if (output_bytecode)
3229 bc_expand_decl (decl, 0);
3230 return;
3233 type = TREE_TYPE (decl);
3235 /* Only automatic variables need any expansion done.
3236 Static and external variables, and external functions,
3237 will be handled by `assemble_variable' (called from finish_decl).
3238 TYPE_DECL and CONST_DECL require nothing.
3239 PARM_DECLs are handled in `assign_parms'. */
3241 if (TREE_CODE (decl) != VAR_DECL)
3242 return;
3243 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3244 return;
3246 /* Create the RTL representation for the variable. */
3248 if (type == error_mark_node)
3249 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3250 else if (DECL_SIZE (decl) == 0)
3251 /* Variable with incomplete type. */
3253 if (DECL_INITIAL (decl) == 0)
3254 /* Error message was already done; now avoid a crash. */
3255 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3256 else
3257 /* An initializer is going to decide the size of this array.
3258 Until we know the size, represent its address with a reg. */
3259 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3260 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
3262 else if (DECL_MODE (decl) != BLKmode
3263 /* If -ffloat-store, don't put explicit float vars
3264 into regs. */
3265 && !(flag_float_store
3266 && TREE_CODE (type) == REAL_TYPE)
3267 && ! TREE_THIS_VOLATILE (decl)
3268 && ! TREE_ADDRESSABLE (decl)
3269 && (DECL_REGISTER (decl) || ! obey_regdecls))
3271 /* Automatic variable that can go in a register. */
3272 int unsignedp = TREE_UNSIGNED (type);
3273 enum machine_mode reg_mode
3274 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3276 if (TREE_CODE (type) == COMPLEX_TYPE)
3278 rtx realpart, imagpart;
3279 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (type));
3281 /* For a complex type variable, make a CONCAT of two pseudos
3282 so that the real and imaginary parts
3283 can be allocated separately. */
3284 realpart = gen_reg_rtx (partmode);
3285 REG_USERVAR_P (realpart) = 1;
3286 imagpart = gen_reg_rtx (partmode);
3287 REG_USERVAR_P (imagpart) = 1;
3288 DECL_RTL (decl) = gen_rtx (CONCAT, reg_mode, realpart, imagpart);
3290 else
3292 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3293 if (TREE_CODE (type) == POINTER_TYPE)
3294 mark_reg_pointer (DECL_RTL (decl));
3295 REG_USERVAR_P (DECL_RTL (decl)) = 1;
3298 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
3300 /* Variable of fixed size that goes on the stack. */
3301 rtx oldaddr = 0;
3302 rtx addr;
3304 /* If we previously made RTL for this decl, it must be an array
3305 whose size was determined by the initializer.
3306 The old address was a register; set that register now
3307 to the proper address. */
3308 if (DECL_RTL (decl) != 0)
3310 if (GET_CODE (DECL_RTL (decl)) != MEM
3311 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3312 abort ();
3313 oldaddr = XEXP (DECL_RTL (decl), 0);
3316 DECL_RTL (decl)
3317 = assign_stack_temp (DECL_MODE (decl),
3318 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3319 + BITS_PER_UNIT - 1)
3320 / BITS_PER_UNIT),
3322 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3324 /* Set alignment we actually gave this decl. */
3325 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3326 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3328 if (oldaddr)
3330 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3331 if (addr != oldaddr)
3332 emit_move_insn (oldaddr, addr);
3335 /* If this is a memory ref that contains aggregate components,
3336 mark it as such for cse and loop optimize. */
3337 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3338 #if 0
3339 /* If this is in memory because of -ffloat-store,
3340 set the volatile bit, to prevent optimizations from
3341 undoing the effects. */
3342 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3343 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3344 #endif
3346 else
3347 /* Dynamic-size object: must push space on the stack. */
3349 rtx address, size;
3351 /* Record the stack pointer on entry to block, if we have
3352 not already done so. */
3353 if (thisblock->data.block.stack_level == 0)
3355 do_pending_stack_adjust ();
3356 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3357 &thisblock->data.block.stack_level,
3358 thisblock->data.block.first_insn);
3359 stack_block_stack = thisblock;
3362 /* Compute the variable's size, in bytes. */
3363 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3364 DECL_SIZE (decl),
3365 size_int (BITS_PER_UNIT)),
3366 NULL_RTX, VOIDmode, 0);
3367 free_temp_slots ();
3369 /* Allocate space on the stack for the variable. */
3370 address = allocate_dynamic_stack_space (size, NULL_RTX,
3371 DECL_ALIGN (decl));
3373 /* Reference the variable indirectly through that rtx. */
3374 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
3376 /* If this is a memory ref that contains aggregate components,
3377 mark it as such for cse and loop optimize. */
3378 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3380 /* Indicate the alignment we actually gave this variable. */
3381 #ifdef STACK_BOUNDARY
3382 DECL_ALIGN (decl) = STACK_BOUNDARY;
3383 #else
3384 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3385 #endif
3388 if (TREE_THIS_VOLATILE (decl))
3389 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3390 #if 0 /* A variable is not necessarily unchanging
3391 just because it is const. RTX_UNCHANGING_P
3392 means no change in the function,
3393 not merely no change in the variable's scope.
3394 It is correct to set RTX_UNCHANGING_P if the variable's scope
3395 is the whole function. There's no convenient way to test that. */
3396 if (TREE_READONLY (decl))
3397 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3398 #endif
3400 /* If doing stupid register allocation, make sure life of any
3401 register variable starts here, at the start of its scope. */
3403 if (obey_regdecls)
3404 use_variable (DECL_RTL (decl));
3408 /* Generate code for the automatic variable declaration DECL. For
3409 most variables this just means we give it a stack offset. The
3410 compiler sometimes emits cleanups without variables and we will
3411 have to deal with those too. */
3413 static void
3414 bc_expand_decl (decl, cleanup)
3415 tree decl;
3416 tree cleanup;
3418 tree type;
3420 if (!decl)
3422 /* A cleanup with no variable. */
3423 if (!cleanup)
3424 abort ();
3426 return;
3429 /* Only auto variables need any work. */
3430 if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3431 return;
3433 type = TREE_TYPE (decl);
3435 if (type == error_mark_node)
3436 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3438 else if (DECL_SIZE (decl) == 0)
3440 /* Variable with incomplete type. The stack offset herein will be
3441 fixed later in expand_decl_init (). */
3442 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3444 else if (TREE_CONSTANT (DECL_SIZE (decl)))
3446 DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
3447 DECL_ALIGN (decl));
3449 else
3450 DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
3453 /* Emit code to perform the initialization of a declaration DECL. */
3455 void
3456 expand_decl_init (decl)
3457 tree decl;
3459 int was_used = TREE_USED (decl);
3461 if (output_bytecode)
3463 bc_expand_decl_init (decl);
3464 return;
3467 /* If this is a CONST_DECL, we don't have to generate any code, but
3468 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3469 to be set while in the obstack containing the constant. If we don't
3470 do this, we can lose if we have functions nested three deep and the middle
3471 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3472 the innermost function is the first to expand that STRING_CST. */
3473 if (TREE_CODE (decl) == CONST_DECL)
3475 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3476 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3477 EXPAND_INITIALIZER);
3478 return;
3481 if (TREE_STATIC (decl))
3482 return;
3484 /* Compute and store the initial value now. */
3486 if (DECL_INITIAL (decl) == error_mark_node)
3488 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3489 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3490 || code == POINTER_TYPE)
3491 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3492 0, 0);
3493 emit_queue ();
3495 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3497 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3498 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3499 emit_queue ();
3502 /* Don't let the initialization count as "using" the variable. */
3503 TREE_USED (decl) = was_used;
3505 /* Free any temporaries we made while initializing the decl. */
3506 free_temp_slots ();
3509 /* Expand initialization for variable-sized types. Allocate array
3510 using newlocalSI and set the local variable, which is a pointer to the
3511 storage. */
3513 static void
3514 bc_expand_variable_local_init (decl)
3515 tree decl;
3517 /* Evaluate size expression and coerce to SI */
3518 bc_expand_expr (DECL_SIZE (decl));
3520 /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
3521 no coercion is necessary (?) */
3523 /* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
3524 TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */
3526 /* Emit code to allocate array */
3527 bc_emit_instruction (newlocalSI);
3529 /* Store array pointer in local variable. This is the only instance
3530 where we actually want the address of the pointer to the
3531 variable-size block, rather than the pointer itself. We avoid
3532 using expand_address() since that would cause the pointer to be
3533 pushed rather than its address. Hence the hard-coded reference;
3534 notice also that the variable is always local (no global
3535 variable-size type variables). */
3537 bc_load_localaddr (DECL_RTL (decl));
3538 bc_emit_instruction (storeP);
3542 /* Emit code to initialize a declaration. */
3544 static void
3545 bc_expand_decl_init (decl)
3546 tree decl;
3548 int org_stack_depth;
3550 /* Static initializers are handled elsewhere. */
3552 if (TREE_STATIC (decl))
3553 return;
3555 /* Remember the original stack depth. */
3556 org_stack_depth = stack_depth;
3558 /* If the type is variable-size, we first create its space (we ASSUME
3559 it CAN'T be static). We do this regardless of whether there's an
3560 initializer assignment or not. */
3562 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
3563 bc_expand_variable_local_init (decl);
3565 /* Expand initializer assignment */
3566 if (DECL_INITIAL (decl) == error_mark_node)
3568 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3570 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3571 || code == POINTER_TYPE)
3573 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3575 else if (DECL_INITIAL (decl))
3576 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3578 /* Restore stack depth */
3579 if (org_stack_depth > stack_depth)
3580 abort ();
3582 bc_adjust_stack (stack_depth - org_stack_depth);
3586 /* CLEANUP is an expression to be executed at exit from this binding contour;
3587 for example, in C++, it might call the destructor for this variable.
3589 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3590 either before or after calling `expand_decl' but before compiling
3591 any subsequent expressions. This is because CLEANUP may be expanded
3592 more than once, on different branches of execution.
3593 For the same reason, CLEANUP may not contain a CALL_EXPR
3594 except as its topmost node--else `preexpand_calls' would get confused.
3596 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3597 that is not associated with any particular variable. */
3599 int
3600 expand_decl_cleanup (decl, cleanup)
3601 tree decl, cleanup;
3603 struct nesting *thisblock = block_stack;
3605 /* Error if we are not in any block. */
3606 if (thisblock == 0)
3607 return 0;
3609 /* Record the cleanup if there is one. */
3611 if (cleanup != 0)
3613 thisblock->data.block.cleanups
3614 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3615 /* If this block has a cleanup, it belongs in stack_block_stack. */
3616 stack_block_stack = thisblock;
3617 (*interim_eh_hook) (NULL_TREE);
3619 return 1;
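/* Editorial sketch, not part of the original file: a caller should
   check the return value, since zero means there is no open binding
   contour to attach the cleanup to.  Note the rule above: any
   SAVE_EXPR shared by CLEANUP must already have been expanded once,
   because this tree may be expanded on several control paths.  The
   diagnostic wording here is invented for illustration.  */
#if 0
  if (! expand_decl_cleanup (decl, cleanup))
    error ("cleanup specified outside any binding contour");
#endif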
3622 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3623 DECL_ELTS is the list of elements that belong to DECL's type.
3624 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3626 void
3627 expand_anon_union_decl (decl, cleanup, decl_elts)
3628 tree decl, cleanup, decl_elts;
3630 struct nesting *thisblock = block_stack;
3631 rtx x;
3633 expand_decl (decl, cleanup);
3634 x = DECL_RTL (decl);
3636 while (decl_elts)
3638 tree decl_elt = TREE_VALUE (decl_elts);
3639 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3640 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3642 /* Propagate the union's alignment to the elements. */
3643 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3645 /* If the element has BLKmode and the union doesn't, the union is
3646 aligned such that the element doesn't need to have BLKmode, so
3647 change the element's mode to the appropriate one for its size. */
3648 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3649 DECL_MODE (decl_elt) = mode
3650 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3651 MODE_INT, 1);
3653 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3654 instead create a new MEM rtx with the proper mode. */
3655 if (GET_CODE (x) == MEM)
3657 if (mode == GET_MODE (x))
3658 DECL_RTL (decl_elt) = x;
3659 else
3661 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
3662 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3663 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3666 else if (GET_CODE (x) == REG)
3668 if (mode == GET_MODE (x))
3669 DECL_RTL (decl_elt) = x;
3670 else
3671 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
3673 else
3674 abort ();
3676 /* Record the cleanup if there is one. */
3678 if (cleanup != 0)
3679 thisblock->data.block.cleanups
3680 = temp_tree_cons (decl_elt, cleanup_elt,
3681 thisblock->data.block.cleanups);
3683 decl_elts = TREE_CHAIN (decl_elts);
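/* Editorial sketch, not part of the original file: building the
   DECL_ELTS argument described above.  Each TREE_LIST node carries a
   member's cleanup in TREE_PURPOSE and its VAR_DECL in TREE_VALUE;
   `u_decl', `member' and `member_cleanup' are hypothetical trees
   supplied by the front end.  */
#if 0
{
  tree elts = tree_cons (member_cleanup, member, NULL_TREE);

  expand_anon_union_decl (u_decl, NULL_TREE, elts);
}
#endif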
3687 /* Expand a list of cleanups LIST.
3688 Elements may be expressions or may be nested lists.
3690 If DONT_DO is nonnull, then any list-element
3691 whose TREE_PURPOSE matches DONT_DO is omitted.
3692 This is sometimes used to avoid a cleanup associated with
3693 a value that is being returned out of the scope.
3695 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
3696 goto and handle protection regions specially in that case.
3698 If REACHABLE is nonzero, we emit code; otherwise we just inform the
3699 exception handling code about this finalization. */
3701 static void
3702 expand_cleanups (list, dont_do, in_fixup, reachable)
3703 tree list;
3704 tree dont_do;
3705 int in_fixup;
3706 int reachable;
3708 tree tail;
3709 for (tail = list; tail; tail = TREE_CHAIN (tail))
3710 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3712 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3713 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
3714 else
3716 if (! in_fixup)
3717 (*interim_eh_hook) (TREE_VALUE (tail));
3719 if (reachable)
3721 /* Cleanups may be run multiple times. For example,
3722 when exiting a binding contour, we expand the
3723 cleanups associated with that contour. When a goto
3724 within that binding contour has a target outside that
3725 contour, it will expand all cleanups from its scope to
3726 the target. Though the cleanups are expanded multiple
3727 times, the control paths are non-overlapping so the
3728 cleanups will not be executed twice. */
3729 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3730 free_temp_slots ();
3736 /* Move all cleanups from the current block_stack
3737 to the containing block_stack, where they are assumed to
3738 have been created. If anything can cause a temporary to
3739 be created, but not expanded for more than one level of
3740 block_stacks, then this code will have to change. */
3742 void
3743 move_cleanups_up ()
3745 struct nesting *block = block_stack;
3746 struct nesting *outer = block->next;
3748 outer->data.block.cleanups
3749 = chainon (block->data.block.cleanups,
3750 outer->data.block.cleanups);
3751 block->data.block.cleanups = 0;
3754 tree
3755 last_cleanup_this_contour ()
3757 if (block_stack == 0)
3758 return 0;
3760 return block_stack->data.block.cleanups;
3763 /* Return 1 if there are any pending cleanups at this point.
3764 If THIS_CONTOUR is nonzero, check the current contour as well.
3765 Otherwise, look only at the contours that enclose this one. */
3767 int
3768 any_pending_cleanups (this_contour)
3769 int this_contour;
3771 struct nesting *block;
3773 if (block_stack == 0)
3774 return 0;
3776 if (this_contour && block_stack->data.block.cleanups != NULL)
3777 return 1;
3778 if (block_stack->data.block.cleanups == 0
3779 && (block_stack->data.block.outer_cleanups == 0
3780 #if 0
3781 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3782 #endif
4083 ))
4084 return 0;
3786 for (block = block_stack->next; block; block = block->next)
3787 if (block->data.block.cleanups != 0)
3788 return 1;
3790 return 0;
3793 /* Enter a case (Pascal) or switch (C) statement.
3794 Push a block onto case_stack and nesting_stack
3795 to accumulate the case-labels that are seen
3796 and to record the labels generated for the statement.
3798 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3799 Otherwise, this construct is transparent for `exit_something'.
3801 EXPR is the index-expression to be dispatched on.
3802 TYPE is its nominal type. We could simply convert EXPR to this type,
3803 but instead we take short cuts. */
3805 void
3806 expand_start_case (exit_flag, expr, type, printname)
3807 int exit_flag;
3808 tree expr;
3809 tree type;
3810 char *printname;
3812 register struct nesting *thiscase = ALLOC_NESTING ();
3814 /* Make an entry on case_stack for the case we are entering. */
3816 thiscase->next = case_stack;
3817 thiscase->all = nesting_stack;
3818 thiscase->depth = ++nesting_depth;
3819 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3820 thiscase->data.case_stmt.case_list = 0;
3821 thiscase->data.case_stmt.index_expr = expr;
3822 thiscase->data.case_stmt.nominal_type = type;
3823 thiscase->data.case_stmt.default_label = 0;
3824 thiscase->data.case_stmt.num_ranges = 0;
3825 thiscase->data.case_stmt.printname = printname;
3826 thiscase->data.case_stmt.seenlabel = 0;
3827 case_stack = thiscase;
3828 nesting_stack = thiscase;
3830 if (output_bytecode)
3832 bc_expand_start_case (thiscase, expr, type, printname);
3833 return;
3836 do_pending_stack_adjust ();
3838 /* Make sure case_stmt.start points to something that won't
3839 need any transformation before expand_end_case. */
3840 if (GET_CODE (get_last_insn ()) != NOTE)
3841 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3843 thiscase->data.case_stmt.start = get_last_insn ();
3847 /* Enter a case statement. It is assumed that the caller has pushed
3848 the current context onto the case stack. */
3850 static void
3851 bc_expand_start_case (thiscase, expr, type, printname)
3852 struct nesting *thiscase;
3853 tree expr;
3854 tree type;
3855 char *printname;
3857 bc_expand_expr (expr);
3858 bc_expand_conversion (TREE_TYPE (expr), type);
3860 /* For cases, the skip label is a place we jump to; its definition is
3861 emitted after the size of the jump table is known. */
3863 thiscase->data.case_stmt.skip_label = gen_label_rtx ();
3864 bc_emit_bytecode (jump);
3865 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
3867 #ifdef DEBUG_PRINT_CODE
3868 fputc ('\n', stderr);
3869 #endif
3873 /* Start a "dummy case statement" within which case labels are invalid
3874 and are not connected to any larger real case statement.
3875 This can be used if you don't want to let a case statement jump
3876 into the middle of certain kinds of constructs. */
3878 void
3879 expand_start_case_dummy ()
3881 register struct nesting *thiscase = ALLOC_NESTING ();
3883 /* Make an entry on case_stack for the dummy. */
3885 thiscase->next = case_stack;
3886 thiscase->all = nesting_stack;
3887 thiscase->depth = ++nesting_depth;
3888 thiscase->exit_label = 0;
3889 thiscase->data.case_stmt.case_list = 0;
3890 thiscase->data.case_stmt.start = 0;
3891 thiscase->data.case_stmt.nominal_type = 0;
3892 thiscase->data.case_stmt.default_label = 0;
3893 thiscase->data.case_stmt.num_ranges = 0;
3894 case_stack = thiscase;
3895 nesting_stack = thiscase;
3898 /* End a dummy case statement. */
3900 void
3901 expand_end_case_dummy ()
3903 POPSTACK (case_stack);
3906 /* Return the data type of the index-expression
3907 of the innermost case statement, or null if none. */
3909 tree
3910 case_index_expr_type ()
3912 if (case_stack)
3913 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3914 return 0;
3917 /* Accumulate one case or default label inside a case or switch statement.
3918 VALUE is the value of the case (a null pointer, for a default label).
3919 The function CONVERTER, when applied to arguments T and V,
3920 converts the value V to the type T.
3922 If not currently inside a case or switch statement, return 1 and do
3923 nothing. The caller will print a language-specific error message.
3924 If VALUE is a duplicate or overlaps, return 2 and do nothing
3925 except store the (first) duplicate node in *DUPLICATE.
3926 If VALUE is out of range, return 3 and do nothing.
3927 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3928 Return 0 on success.
3930 Extended to handle range statements. */
3932 int
3933 pushcase (value, converter, label, duplicate)
3934 register tree value;
3935 tree (*converter) PROTO((tree, tree));
3936 register tree label;
3937 tree *duplicate;
3939 register struct case_node **l;
3940 register struct case_node *n;
3941 tree index_type;
3942 tree nominal_type;
3944 if (output_bytecode)
3945 return bc_pushcase (value, label);
3947 /* Fail if not inside a real case statement. */
3948 if (! (case_stack && case_stack->data.case_stmt.start))
3949 return 1;
3951 if (stack_block_stack
3952 && stack_block_stack->depth > case_stack->depth)
3953 return 5;
3955 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3956 nominal_type = case_stack->data.case_stmt.nominal_type;
3958 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3959 if (index_type == error_mark_node)
3960 return 0;
3962 /* Convert VALUE to the type in which the comparisons are nominally done. */
3963 if (value != 0)
3964 value = (*converter) (nominal_type, value);
3966 /* If this is the first label, warn if any insns have been emitted. */
3967 if (case_stack->data.case_stmt.seenlabel == 0)
3969 rtx insn;
3970 for (insn = case_stack->data.case_stmt.start;
3971 insn;
3972 insn = NEXT_INSN (insn))
3974 if (GET_CODE (insn) == CODE_LABEL)
3975 break;
3976 if (GET_CODE (insn) != NOTE
3977 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3979 warning ("unreachable code at beginning of %s",
3980 case_stack->data.case_stmt.printname);
3981 break;
3985 case_stack->data.case_stmt.seenlabel = 1;
3987 /* Fail if this value is out of range for the actual type of the index
3988 (which may be narrower than NOMINAL_TYPE). */
3989 if (value != 0 && ! int_fits_type_p (value, index_type))
3990 return 3;
3992 /* Fail if this is a duplicate or overlaps another entry. */
3993 if (value == 0)
3995 if (case_stack->data.case_stmt.default_label != 0)
3997 *duplicate = case_stack->data.case_stmt.default_label;
3998 return 2;
4000 case_stack->data.case_stmt.default_label = label;
4002 else
4004 /* Find the elt in the chain before which to insert the new value,
4005 to keep the chain sorted in increasing order.
4006 But report an error if this element is a duplicate. */
4007 for (l = &case_stack->data.case_stmt.case_list;
4008 /* Keep going past elements distinctly less than VALUE. */
4009 *l != 0 && tree_int_cst_lt ((*l)->high, value);
4010 l = &(*l)->right)
4012 if (*l)
4014 /* Element we will insert before must be distinctly greater;
4015 overlap means error. */
4016 if (! tree_int_cst_lt (value, (*l)->low))
4018 *duplicate = (*l)->code_label;
4019 return 2;
4023 /* Add this label to the chain, and succeed.
4024 Copy VALUE so it is on temporary rather than momentary
4025 obstack and will thus survive till the end of the case statement. */
4026 n = (struct case_node *) oballoc (sizeof (struct case_node));
4027 n->left = 0;
4028 n->right = *l;
4029 n->high = n->low = copy_node (value);
4030 n->code_label = label;
4031 *l = n;
4034 expand_label (label);
4035 return 0;
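/* Editorial sketch, not part of the original file: how a front end
   might act on the return codes documented above.  `convert' stands
   for the front end's conversion routine passed as CONVERTER; the
   diagnostic wording is illustrative.  */
#if 0
static void
example_add_case_label (value, label)
     tree value, label;
{
  tree duplicate;

  switch (pushcase (value, convert, label, &duplicate))
    {
    case 1:
      error ("case label not within a switch statement");
      break;
    case 2:
      error ("duplicate case value");
      break;
    case 3:
      error ("case value out of range");
      break;
    case 5:
      error ("case label within scope of cleanup or variable array");
      break;
    }
}
#endif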
4038 /* Like pushcase but this case applies to all values
4039 between VALUE1 and VALUE2 (inclusive).
4040 The return value is the same as that of pushcase
4041 but there is one additional error code:
4042 4 means the specified range was empty. */
4044 int
4045 pushcase_range (value1, value2, converter, label, duplicate)
4046 register tree value1, value2;
4047 tree (*converter) PROTO((tree, tree));
4048 register tree label;
4049 tree *duplicate;
4051 register struct case_node **l;
4052 register struct case_node *n;
4053 tree index_type;
4054 tree nominal_type;
4056 /* Fail if not inside a real case statement. */
4057 if (! (case_stack && case_stack->data.case_stmt.start))
4058 return 1;
4060 if (stack_block_stack
4061 && stack_block_stack->depth > case_stack->depth)
4062 return 5;
4064 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4065 nominal_type = case_stack->data.case_stmt.nominal_type;
4067 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4068 if (index_type == error_mark_node)
4069 return 0;
4071 /* If this is the first label, warn if any insns have been emitted. */
4072 if (case_stack->data.case_stmt.seenlabel == 0)
4074 rtx insn;
4075 for (insn = case_stack->data.case_stmt.start;
4076 insn;
4077 insn = NEXT_INSN (insn))
4079 if (GET_CODE (insn) == CODE_LABEL)
4080 break;
4081 if (GET_CODE (insn) != NOTE
4082 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4084 warning ("unreachable code at beginning of %s",
4085 case_stack->data.case_stmt.printname);
4086 break;
4090 case_stack->data.case_stmt.seenlabel = 1;
4092 /* Convert VALUEs to type in which the comparisons are nominally done. */
4093 if (value1 == 0) /* Negative infinity. */
4094 value1 = TYPE_MIN_VALUE (index_type);
4095 value1 = (*converter) (nominal_type, value1);
4097 if (value2 == 0) /* Positive infinity. */
4098 value2 = TYPE_MAX_VALUE (index_type);
4099 value2 = (*converter) (nominal_type, value2);
4101 /* Fail if these values are out of range. */
4102 if (! int_fits_type_p (value1, index_type))
4103 return 3;
4105 if (! int_fits_type_p (value2, index_type))
4106 return 3;
4108 /* Fail if the range is empty. */
4109 if (tree_int_cst_lt (value2, value1))
4110 return 4;
4112 /* If the bounds are equal, turn this into the one-value case. */
4113 if (tree_int_cst_equal (value1, value2))
4114 return pushcase (value1, converter, label, duplicate);
4116 /* Find the elt in the chain before which to insert the new value,
4117 to keep the chain sorted in increasing order.
4118 But report an error if this element is a duplicate. */
4119 for (l = &case_stack->data.case_stmt.case_list;
4120 /* Keep going past elements distinctly less than this range. */
4121 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
4122 l = &(*l)->right)
4124 if (*l)
4126 /* Element we will insert before must be distinctly greater;
4127 overlap means error. */
4128 if (! tree_int_cst_lt (value2, (*l)->low))
4130 *duplicate = (*l)->code_label;
4131 return 2;
4135 /* Add this label to the chain, and succeed.
4136 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
4137 obstack and will thus survive till the end of the case statement. */
4139 n = (struct case_node *) oballoc (sizeof (struct case_node));
4140 n->left = 0;
4141 n->right = *l;
4142 n->low = copy_node (value1);
4143 n->high = copy_node (value2);
4144 n->code_label = label;
4145 *l = n;
4147 expand_label (label);
4149 case_stack->data.case_stmt.num_ranges++;
4151 return 0;
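/* Editorial sketch, not part of the original file: registering a GNU C
   case range such as `case 1 ... 10:'.  Return code 4 is the one
   failure mode that is new relative to pushcase; `lo', `hi' and the
   diagnostic wording are illustrative.  */
#if 0
static void
example_add_case_range (lo, hi, label)
     tree lo, hi, label;
{
  tree duplicate;

  if (pushcase_range (lo, hi, convert, label, &duplicate) == 4)
    warning ("empty case range");
}
#endif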
4155 /* Accumulate one case or default label; VALUE is the value of the
4156 case, or nil for a default label. If not currently inside a case,
4157 return 1 and do nothing. If VALUE is a duplicate or overlaps, return
4158 2 and do nothing. If VALUE is out of range, return 3 and do nothing.
4159 Return 0 on success. This function is a leftover from the earlier
4160 bytecode compiler, which was based on gcc 1.37. It should be
4161 merged into pushcase. */
4163 static int
4164 bc_pushcase (value, label)
4165 tree value;
4166 tree label;
4168 struct nesting *thiscase = case_stack;
4169 struct case_node *case_label, *new_label;
4171 if (! thiscase)
4172 return 1;
4174 /* Fail if duplicate, overlap, or out of type range. */
4175 if (value)
4177 value = convert (thiscase->data.case_stmt.nominal_type, value);
4178 if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
4179 return 3;
4181 for (case_label = thiscase->data.case_stmt.case_list;
4182 case_label->left; case_label = case_label->left)
4183 if (! tree_int_cst_lt (case_label->left->high, value))
4184 break;
4186 if (case_label != thiscase->data.case_stmt.case_list
4187 && ! tree_int_cst_lt (case_label->high, value)
4188 || case_label->left && ! tree_int_cst_lt (value, case_label->left->low))
4189 return 2;
4191 new_label = (struct case_node *) oballoc (sizeof (struct case_node));
4192 new_label->low = new_label->high = copy_node (value);
4193 new_label->code_label = label;
4194 new_label->left = case_label->left;
4196 case_label->left = new_label;
4197 thiscase->data.case_stmt.num_ranges++;
4199 else
4201 if (thiscase->data.case_stmt.default_label)
4202 return 2;
4203 thiscase->data.case_stmt.default_label = label;
4206 expand_label (label);
4207 return 0;
4210 /* Returns the number of possible values of TYPE.
4211 Returns -1 if the number is unknown or variable.
4212 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4213 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4214 do not increase monotonically (there may be duplicates);
4215 to 1 if the values increase monotonically, but not always by 1;
4216 otherwise sets it to 0. */
4218 HOST_WIDE_INT
4219 all_cases_count (type, spareness)
4220 tree type;
4221 int *spareness;
4223 HOST_WIDE_INT count, count_high = 0;
4224 *spareness = 0;
4226 switch (TREE_CODE (type))
4228 tree t;
4229 case BOOLEAN_TYPE:
4230 count = 2;
4231 break;
4232 case CHAR_TYPE:
4233 count = 1 << BITS_PER_UNIT;
4234 break;
4235 default:
4236 case INTEGER_TYPE:
4237 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4238 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4239 return -1;
4240 else
4242 /* count
4243 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4244 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4245 but with overflow checking. */
4246 tree mint = TYPE_MIN_VALUE (type);
4247 tree maxt = TYPE_MAX_VALUE (type);
4248 HOST_WIDE_INT lo, hi;
4249 neg_double (TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4250 &lo, &hi);
4251 add_double (TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4252 lo, hi, &lo, &hi);
4253 add_double (lo, hi, 1, 0, &lo, &hi);
4254 if (hi != 0 || lo < 0)
4255 return -2;
4256 count = lo;
4258 break;
4259 case ENUMERAL_TYPE:
4260 count = 0;
4261 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4263 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4264 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4265 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4266 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4267 *spareness = 1;
4268 count++;
4270 if (*spareness == 1)
4272 tree prev = TREE_VALUE (TYPE_VALUES (type));
4273 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4275 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4277 *spareness = 2;
4278 break;
4280 prev = TREE_VALUE (t);
4285 return count;
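/* Editorial sketch, not part of the original file: worked examples of
   the classification performed above.  */
#if 0
enum dense { A, B, C };              /* count 3, *SPARENESS set to 0 */
enum gappy { D = 0, E = 4 };         /* count 2, *SPARENESS set to 1 */
enum dup   { F = 1, G = 1, H = 0 };  /* count 3, *SPARENESS set to 2 */
/* For an INTEGER_TYPE with range 0 .. 255, the function returns
   255 - 0 + 1 == 256.  */
#endif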
4289 #define BITARRAY_TEST(ARRAY, INDEX) \
4290 ((ARRAY)[(unsigned)(INDEX) / HOST_BITS_PER_CHAR]\
4291 & (1 << ((unsigned)(INDEX) % HOST_BITS_PER_CHAR)))
4292 #define BITARRAY_SET(ARRAY, INDEX) \
4293 ((ARRAY)[(unsigned)(INDEX) / HOST_BITS_PER_CHAR]\
4294 |= 1 << ((unsigned)(INDEX) % HOST_BITS_PER_CHAR))
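/* Editorial sketch, not part of the original file: with 8-bit chars,
   index 11 lands in byte 11 / 8 == 1 at bit 11 % 8 == 3, so the macros
   above expand to an OR with, and a test of, the mask 1 << 3 == 0x08.  */
#if 0
static void
example_bitarray_use ()
{
  unsigned char seen[2] = { 0, 0 };

  BITARRAY_SET (seen, 11);          /* seen[1] becomes 0x08.  */
  if (! BITARRAY_TEST (seen, 11))
    abort ();
}
#endif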
4296 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT)
4297 with the case values we have seen, assuming the case expression
4298 has the given TYPE.
4299 SPARSENESS is as determined by all_cases_count.
4301 The time needed is proportional to COUNT, unless
4302 SPARSENESS is 2, in which case quadratic time is needed. */
4304 void
4305 mark_seen_cases (type, cases_seen, count, sparseness)
4306 tree type;
4307 unsigned char *cases_seen;
4308 long count;
4309 int sparseness;
4311 long i;
4313 tree next_node_to_try = NULL_TREE;
4314 long next_node_offset = 0;
4316 register struct case_node *n;
4317 tree val = make_node (INTEGER_CST);
4318 TREE_TYPE (val) = type;
4319 for (n = case_stack->data.case_stmt.case_list; n;
4320 n = n->right)
4322 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4323 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4324 while ( ! tree_int_cst_lt (n->high, val))
4326 /* Calculate (into xlo) the "offset" of the integer (val).
4327 The element with lowest value has offset 0, the next smallest
4328 element has offset 1, etc. */
4330 HOST_WIDE_INT xlo, xhi;
4331 tree t;
4332 if (sparseness == 2)
4334 /* This less efficient loop is only needed to handle
4335 duplicate case values (multiple enum constants
4336 with the same value). */
4337 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4338 t = TREE_CHAIN (t), xlo++)
4340 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4341 BITARRAY_SET (cases_seen, xlo);
4344 else
4346 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4348 /* The TYPE_VALUES will be in increasing order, so
4349 start searching where we last ended. */
4350 t = next_node_to_try;
4351 xlo = next_node_offset;
4352 xhi = 0;
4353 for (;;)
4355 if (t == NULL_TREE)
4357 t = TYPE_VALUES (type);
4358 xlo = 0;
4360 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4362 next_node_to_try = TREE_CHAIN (t);
4363 next_node_offset = xlo + 1;
4364 break;
4366 xlo++;
4367 t = TREE_CHAIN (t);
4368 if (t == next_node_to_try)
4369 break;
4372 else
4374 t = TYPE_MIN_VALUE (type);
4375 if (t)
4376 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4377 &xlo, &xhi);
4378 else
4379 xlo = xhi = 0;
4380 add_double (xlo, xhi,
4381 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4382 &xlo, &xhi);
4385 if (xhi == 0 && xlo >= 0 && xlo < count)
4386 BITARRAY_SET (cases_seen, xlo);
4388 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4389 1, 0,
4390 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4395 /* Called when the index of a switch statement is an enumerated type
4396 and there is no default label.
4398 Checks that all enumeration literals are covered by the case
4399 expressions of a switch. Also, warn if there are any extra
4400 switch cases that are *not* elements of the enumerated type.
4402 If all enumeration literals were covered by the case expressions,
4403 turn one of the expressions into the default expression since it should
4404 not be possible to fall through such a switch. */
4406 void
4407 check_for_full_enumeration_handling (type)
4408 tree type;
4410 register struct case_node *n;
4411 register struct case_node **l;
4412 register tree chain;
4413 int all_values = 1;
4415 /* True iff the selector type is a numbered set mode. */
4416 int sparseness = 0;
4418 /* The number of possible selector values. */
4419 HOST_WIDE_INT size;
4421 /* For each possible selector value, a one iff it has been matched
4422 by a case value alternative. */
4423 unsigned char *cases_seen;
4425 /* The allocated size of cases_seen, in chars. */
4426 long bytes_needed;
4427 tree t;
4429 if (output_bytecode)
4431 bc_check_for_full_enumeration_handling (type);
4432 return;
4435 if (! warn_switch)
4436 return;
4438 size = all_cases_count (type, &sparseness);
4439 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
4441 if (size > 0 && size < 600000
4442 /* We deliberately use malloc here - not xmalloc. */
4443 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
4445 long i;
4446 tree v = TYPE_VALUES (type);
4447 bzero (cases_seen, bytes_needed);
4449 /* The time complexity of this code is normally O(N), where
4450 N is the number of members in the enumerated type.
4451 However, if type is an ENUMERAL_TYPE whose values do not
4452 increase monotonically, quadratic time may be needed. */
4454 mark_seen_cases (type, cases_seen, size, sparseness);
4456 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4458 if (BITARRAY_TEST(cases_seen, i) == 0)
4459 warning ("enumeration value `%s' not handled in switch",
4460 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
4463 free (cases_seen);
4466 /* Now we go the other way around; we warn if there are case
4467 expressions that don't correspond to enumerators. This can
4468 occur since C and C++ don't enforce type-checking of
4469 assignments to enumeration variables. */
4471 if (warn_switch)
4472 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4474 for (chain = TYPE_VALUES (type);
4475 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4476 chain = TREE_CHAIN (chain))
4479 if (!chain)
4481 if (TYPE_NAME (type) == 0)
4482 warning ("case value `%d' not in enumerated type",
4483 TREE_INT_CST_LOW (n->low));
4484 else
4485 warning ("case value `%d' not in enumerated type `%s'",
4486 TREE_INT_CST_LOW (n->low),
4487 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4488 == IDENTIFIER_NODE)
4489 ? TYPE_NAME (type)
4490 : DECL_NAME (TYPE_NAME (type))));
4492 if (!tree_int_cst_equal (n->low, n->high))
4494 for (chain = TYPE_VALUES (type);
4495 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4496 chain = TREE_CHAIN (chain))
4499 if (!chain)
4501 if (TYPE_NAME (type) == 0)
4502 warning ("case value `%d' not in enumerated type",
4503 TREE_INT_CST_LOW (n->high));
4504 else
4505 warning ("case value `%d' not in enumerated type `%s'",
4506 TREE_INT_CST_LOW (n->high),
4507 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4508 == IDENTIFIER_NODE)
4509 ? TYPE_NAME (type)
4510 : DECL_NAME (TYPE_NAME (type))));
4515 #if 0
4516 /* ??? This optimization is disabled because it causes valid programs to
4517 fail. ANSI C does not guarantee that an expression with enum type
4518 will have a value that is the same as one of the enumeration literals. */
4520 /* If all values were found as case labels, make one of them the default
4521 label. Thus, this switch will never fall through. We arbitrarily pick
4522 the last one to make the default since this is likely the most
4523 efficient choice. */
4525 if (all_values)
4527 for (l = &case_stack->data.case_stmt.case_list;
4528 (*l)->right != 0;
4529 l = &(*l)->right)
4532 case_stack->data.case_stmt.default_label = (*l)->code_label;
4533 *l = 0;
4535 #endif /* 0 */
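/* Editorial sketch, not part of the original file: a user-level switch
   that draws both warnings above when -Wswitch is in effect.  */
#if 0
enum color { RED, GREEN, BLUE };

void
f (c)
     enum color c;
{
  switch (c)
    {
    case RED:                   /* BLUE is never handled, so we warn
                                   "enumeration value `BLUE' not
                                   handled in switch".  */
    case GREEN:
      break;
    case 7:                     /* 7 is not an enumerator, so we warn
                                   "case value `7' not in enumerated
                                   type `color'".  */
      break;
    }
}
#endif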
4539 /* Check that all enumeration literals are covered by the case
4540 expressions of a switch. Also warn if there are any cases
4541 that are not elements of the enumerated type. */
4543 static void
4544 bc_check_for_full_enumeration_handling (type)
4545 tree type;
4547 struct nesting *thiscase = case_stack;
4548 struct case_node *c;
4549 tree e;
4551 /* Check for enums not handled. */
4552 for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
4554 for (c = thiscase->data.case_stmt.case_list->left;
4555 c && tree_int_cst_lt (c->high, TREE_VALUE (e));
4556 c = c->left)
4558 if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
4559 warning ("enumerated value `%s' not handled in switch",
4560 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
4563 /* Check for cases not in the enumeration. */
4564 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4566 for (e = TYPE_VALUES (type);
4567 e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
4568 e = TREE_CHAIN (e))
4570 if (! e)
4571 warning ("case value `%d' not in enumerated type `%s'",
4572 TREE_INT_CST_LOW (c->low),
4573 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
4574 ? TYPE_NAME (type)
4575 : DECL_NAME (TYPE_NAME (type))));
4579 /* Terminate a case (Pascal) or switch (C) statement
4580 in which ORIG_INDEX is the expression to be tested.
4581 Generate the code to test it and jump to the right place. */
4583 void
4584 expand_end_case (orig_index)
4585 tree orig_index;
4587 tree minval, maxval, range, orig_minval;
4588 rtx default_label = 0;
4589 register struct case_node *n;
4590 int count;
4591 rtx index;
4592 rtx table_label;
4593 int ncases;
4594 rtx *labelvec;
4595 register int i;
4596 rtx before_case;
4597 register struct nesting *thiscase = case_stack;
4598 tree index_expr, index_type;
4599 int unsignedp;
4601 if (output_bytecode)
4603 bc_expand_end_case (orig_index);
4604 return;
4607 table_label = gen_label_rtx ();
4608 index_expr = thiscase->data.case_stmt.index_expr;
4609 index_type = TREE_TYPE (index_expr);
4610 unsignedp = TREE_UNSIGNED (index_type);
4612 do_pending_stack_adjust ();
4614 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4615 if (index_type != error_mark_node)
4617 /* If switch expression was an enumerated type, check that all
4618 enumeration literals are covered by the cases.
4619 No sense trying this if there's a default case, however. */
4621 if (!thiscase->data.case_stmt.default_label
4622 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4623 && TREE_CODE (index_expr) != INTEGER_CST)
4624 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4626 /* If this is the first label, warn if any insns have been emitted. */
4627 if (thiscase->data.case_stmt.seenlabel == 0)
4629 rtx insn;
4630 for (insn = get_last_insn ();
4631 insn != case_stack->data.case_stmt.start;
4632 insn = PREV_INSN (insn))
4633 if (GET_CODE (insn) != NOTE
4634 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
4636 warning ("unreachable code at beginning of %s",
4637 case_stack->data.case_stmt.printname);
4638 break;
4642 /* If we don't have a default-label, create one here,
4643 after the body of the switch. */
4644 if (thiscase->data.case_stmt.default_label == 0)
4646 thiscase->data.case_stmt.default_label
4647 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4648 expand_label (thiscase->data.case_stmt.default_label);
4650 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4652 before_case = get_last_insn ();
4654 /* Simplify the case-list before we count it. */
4655 group_case_nodes (thiscase->data.case_stmt.case_list);
4657 /* Get upper and lower bounds of case values.
4658 Also convert all the case values to the index expr's data type. */
4660 count = 0;
4661 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4663 /* Check low and high label values are integers. */
4664 if (TREE_CODE (n->low) != INTEGER_CST)
4665 abort ();
4666 if (TREE_CODE (n->high) != INTEGER_CST)
4667 abort ();
4669 n->low = convert (index_type, n->low);
4670 n->high = convert (index_type, n->high);
4672 /* Count the elements and track the largest and smallest
4673 of them (treating them as signed even if they are not). */
4674 if (count++ == 0)
4676 minval = n->low;
4677 maxval = n->high;
4679 else
4681 if (INT_CST_LT (n->low, minval))
4682 minval = n->low;
4683 if (INT_CST_LT (maxval, n->high))
4684 maxval = n->high;
4686 /* A range counts double, since it requires two compares. */
4687 if (! tree_int_cst_equal (n->low, n->high))
4688 count++;
4691 orig_minval = minval;
4693 /* Compute span of values. */
4694 if (count != 0)
4695 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
4697 if (count == 0)
4699 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
4700 emit_queue ();
4701 emit_jump (default_label);
4704 /* If range of values is much bigger than number of values,
4705 make a sequence of conditional branches instead of a dispatch.
4706 If the switch-index is a constant, do it this way
4707 because we can optimize it. */
4709 #ifndef CASE_VALUES_THRESHOLD
4710 #ifdef HAVE_casesi
4711 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
4712 #else
4713 /* If machine does not have a case insn that compares the
4714 bounds, this means extra overhead for dispatch tables
4715 which raises the threshold for using them. */
4716 #define CASE_VALUES_THRESHOLD 5
4717 #endif /* HAVE_casesi */
4718 #endif /* CASE_VALUES_THRESHOLD */
4720 else if (TREE_INT_CST_HIGH (range) != 0
4721 || count < CASE_VALUES_THRESHOLD
4722 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
4723 > 10 * count)
4724 || TREE_CODE (index_expr) == INTEGER_CST
4725 /* These will reduce to a constant. */
4726 || (TREE_CODE (index_expr) == CALL_EXPR
4727 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
4728 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
4729 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
4730 || (TREE_CODE (index_expr) == COMPOUND_EXPR
4731 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
4733 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4735 /* If the index is a short or char for which we do not have
4736 an insn to handle comparisons directly, convert it to
4737 a full integer now, rather than letting each comparison
4738 generate the conversion. */
4740 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
4741 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
4742 == CODE_FOR_nothing))
4744 enum machine_mode wider_mode;
4745 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
4746 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4747 if (cmp_optab->handlers[(int) wider_mode].insn_code
4748 != CODE_FOR_nothing)
4750 index = convert_to_mode (wider_mode, index, unsignedp);
4751 break;
4755 emit_queue ();
4756 do_pending_stack_adjust ();
4758 index = protect_from_queue (index, 0);
4759 if (GET_CODE (index) == MEM)
4760 index = copy_to_reg (index);
4761 if (GET_CODE (index) == CONST_INT
4762 || TREE_CODE (index_expr) == INTEGER_CST)
4764 /* Make a tree node with the proper constant value
4765 if we don't already have one. */
4766 if (TREE_CODE (index_expr) != INTEGER_CST)
4768 index_expr
4769 = build_int_2 (INTVAL (index),
4770 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
4771 index_expr = convert (index_type, index_expr);
4774 /* For constant index expressions we need only
4775 issue an unconditional branch to the appropriate
4776 target code. The job of removing any unreachable
4777 code is left to the optimization phase if the
4778 "-O" option is specified. */
4779 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4780 if (! tree_int_cst_lt (index_expr, n->low)
4781 && ! tree_int_cst_lt (n->high, index_expr))
4782 break;
4784 if (n)
4785 emit_jump (label_rtx (n->code_label));
4786 else
4787 emit_jump (default_label);
4789 else
4791 /* If the index expression is not constant we generate
4792 a binary decision tree to select the appropriate
4793 target code. This is done as follows:
4795 The list of cases is rearranged into a binary tree,
4796 nearly optimal assuming equal probability for each case.
4798 The tree is transformed into RTL, eliminating
4799 redundant test conditions at the same time.
4801 If program flow could reach the end of the
4802 decision tree an unconditional jump to the
4803 default code is emitted. */
4805 use_cost_table
4806 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
4807 && estimate_case_costs (thiscase->data.case_stmt.case_list));
4808 balance_case_nodes (&thiscase->data.case_stmt.case_list,
4809 NULL_PTR);
4810 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
4811 default_label, index_type);
4812 emit_jump_if_reachable (default_label);
4815 else
4817 int win = 0;
4818 #ifdef HAVE_casesi
4819 if (HAVE_casesi)
4821 enum machine_mode index_mode = SImode;
4822 int index_bits = GET_MODE_BITSIZE (index_mode);
4823 rtx op1, op2;
4824 enum machine_mode op_mode;
4826 /* Convert the index to SImode. */
4827 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
4828 > GET_MODE_BITSIZE (index_mode))
4830 enum machine_mode omode = TYPE_MODE (index_type);
4831 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
4833 /* We must handle the endpoints in the original mode. */
4834 index_expr = build (MINUS_EXPR, index_type,
4835 index_expr, minval);
4836 minval = integer_zero_node;
4837 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4838 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
4839 emit_jump_insn (gen_bltu (default_label));
4840 /* Now we can safely truncate. */
4841 index = convert_to_mode (index_mode, index, 0);
4843 else
4845 if (TYPE_MODE (index_type) != index_mode)
4847 index_expr = convert (type_for_size (index_bits, 0),
4848 index_expr);
4849 index_type = TREE_TYPE (index_expr);
4852 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4854 emit_queue ();
4855 index = protect_from_queue (index, 0);
4856 do_pending_stack_adjust ();
4858 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
4859 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
4860 (index, op_mode))
4861 index = copy_to_mode_reg (op_mode, index);
4863 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
4865 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
4866 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
4867 (op1, op_mode))
4868 op1 = copy_to_mode_reg (op_mode, op1);
4870 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
4872 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
4873 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
4874 (op2, op_mode))
4875 op2 = copy_to_mode_reg (op_mode, op2);
4877 emit_jump_insn (gen_casesi (index, op1, op2,
4878 table_label, default_label));
4879 win = 1;
4881 #endif
4882 #ifdef HAVE_tablejump
4883 if (! win && HAVE_tablejump)
4885 index_expr = convert (thiscase->data.case_stmt.nominal_type,
4886 fold (build (MINUS_EXPR, index_type,
4887 index_expr, minval)));
4888 index_type = TREE_TYPE (index_expr);
4889 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4890 emit_queue ();
4891 index = protect_from_queue (index, 0);
4892 do_pending_stack_adjust ();
4894 do_tablejump (index, TYPE_MODE (index_type),
4895 expand_expr (range, NULL_RTX, VOIDmode, 0),
4896 table_label, default_label);
4897 win = 1;
4899 #endif
4900 if (! win)
4901 abort ();
4903 /* Get table of labels to jump to, in order of case index. */
4905 ncases = TREE_INT_CST_LOW (range) + 1;
4906 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
4907 bzero ((char *) labelvec, ncases * sizeof (rtx));
4909 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4911 register HOST_WIDE_INT i
4912 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
4914 while (1)
4916 labelvec[i]
4917 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
4918 if (i + TREE_INT_CST_LOW (orig_minval)
4919 == TREE_INT_CST_LOW (n->high))
4920 break;
4921 i++;
4925 /* Fill in the gaps with the default. */
4926 for (i = 0; i < ncases; i++)
4927 if (labelvec[i] == 0)
4928 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
4930 /* Output the table */
4931 emit_label (table_label);
4933 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
4934 were an expression, instead of an #ifdef/#ifndef. */
4935 if (
4936 #ifdef CASE_VECTOR_PC_RELATIVE
4937 1 ||
4938 #endif
4939 flag_pic)
4940 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
4941 gen_rtx (LABEL_REF, Pmode, table_label),
4942 gen_rtvec_v (ncases, labelvec)));
4943 else
4944 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
4945 gen_rtvec_v (ncases, labelvec)));
4947 /* If the case insn drops through the table,
4948 after the table we must jump to the default-label.
4949 Otherwise record no drop-through after the table. */
4950 #ifdef CASE_DROPS_THROUGH
4951 emit_jump (default_label);
4952 #else
4953 emit_barrier ();
4954 #endif
4957 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
4958 reorder_insns (before_case, get_last_insn (),
4959 thiscase->data.case_stmt.start);
4962 if (thiscase->exit_label)
4963 emit_label (thiscase->exit_label);
4965 POPSTACK (case_stack);
4967 free_temp_slots ();
4971 /* Terminate a case statement. EXPR is the original index
4972 expression. */
4974 static void
4975 bc_expand_end_case (expr)
4976 tree expr;
4978 struct nesting *thiscase = case_stack;
4979 enum bytecode_opcode opcode;
4980 struct bc_label *jump_label;
4981 struct case_node *c;
4983 bc_emit_bytecode (jump);
4984 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
4986 #ifdef DEBUG_PRINT_CODE
4987 fputc ('\n', stderr);
4988 #endif
4990 /* Now that the size of the jump table is known, emit the actual
4991 indexed jump instruction. */
4992 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
4994 opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
4995 ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
4996 : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;
4998 bc_emit_bytecode (opcode);
5000 /* Now emit the case instruction's literal arguments, in order.
5001 In addition to the value on the stack, it uses:
5002 1. The address of the jump table.
5003 2. The size of the jump table.
5004 3. The default label. */
5006 jump_label = bc_get_bytecode_label ();
5007 bc_emit_bytecode_labelref (jump_label);
5008 bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
5009 sizeof thiscase->data.case_stmt.num_ranges);
5011 if (thiscase->data.case_stmt.default_label)
5012 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)));
5013 else
5014 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
5016 /* Output the jump table. */
5018 bc_align_bytecode (3 /* PTR_ALIGN */);
5019 bc_emit_bytecode_labeldef (jump_label);
5021 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
5022 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5024 opcode = TREE_INT_CST_LOW (c->low);
5025 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5027 opcode = TREE_INT_CST_LOW (c->high);
5028 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5030 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
5032 else
5033 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
5034 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5036 bc_emit_bytecode_DI_const (c->low);
5037 bc_emit_bytecode_DI_const (c->high);
5039 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
5041 else
5042 /* Bad mode */
5043 abort ();
5046 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));
5048 /* Possibly issue enumeration warnings. */
5050 if (!thiscase->data.case_stmt.default_label
5051 && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
5052 && TREE_CODE (expr) != INTEGER_CST
5053 && warn_switch)
5054 check_for_full_enumeration_handling (TREE_TYPE (expr));
5057 #ifdef DEBUG_PRINT_CODE
5058 fputc ('\n', stderr);
5059 #endif
5061 POPSTACK (case_stack);
5065 /* Return unique bytecode ID. */
5067 int
5068 bc_new_uid ()
5070 static int bc_uid = 0;
5072 return (++bc_uid);
5075 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5077 static void
5078 do_jump_if_equal (op1, op2, label, unsignedp)
5079 rtx op1, op2, label;
5080 int unsignedp;
5082 if (GET_CODE (op1) == CONST_INT
5083 && GET_CODE (op2) == CONST_INT)
5085 if (INTVAL (op1) == INTVAL (op2))
5086 emit_jump (label);
5088 else
5090 enum machine_mode mode = GET_MODE (op1);
5091 if (mode == VOIDmode)
5092 mode = GET_MODE (op2);
5093 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
5094 emit_jump_insn (gen_beq (label));
5098 /* Not all case values are encountered equally. This function
5099 uses a heuristic to weight case labels, in cases where that
5100 looks like a reasonable thing to do.
5102 Right now, all we try to guess is text, and we establish the
5103 following weights:
5105 chars above space: 16
5106 digits: 16
5107 default: 12
5108 space, punct: 8
5109 tab: 4
5110 newline: 2
5111 other "\" chars: 1
5112 remaining chars: 0
5114 If we find any cases in the switch that are not either -1 or in the range
5115 of valid ASCII characters, or are control characters other than those
5116 commonly used with "\", don't treat this switch as scanning text.
5118 Return 1 if these nodes are suitable for cost estimation, otherwise
5119 return 0. */
5121 static int
5122 estimate_case_costs (node)
5123 case_node_ptr node;
5125 tree min_ascii = build_int_2 (-1, -1);
5126 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5127 case_node_ptr n;
5128 int i;
5130 /* If we haven't already made the cost table, make it now. Note that the
5131 lower bound of the table is -1, not zero. */
5133 if (cost_table == NULL)
5135 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5136 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5138 for (i = 0; i < 128; i++)
5140 if (isalnum (i))
5141 cost_table[i] = 16;
5142 else if (ispunct (i))
5143 cost_table[i] = 8;
5144 else if (iscntrl (i))
5145 cost_table[i] = -1;
5148 cost_table[' '] = 8;
5149 cost_table['\t'] = 4;
5150 cost_table['\0'] = 4;
5151 cost_table['\n'] = 2;
5152 cost_table['\f'] = 1;
5153 cost_table['\v'] = 1;
5154 cost_table['\b'] = 1;
5157 /* See if all the case expressions look like text. It is text if the
5158 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5159 as signed arithmetic since we don't want to ever access cost_table with a
5160 value less than -1. Also check that none of the constants in a range
5161 are strange control characters. */
5163 for (n = node; n; n = n->right)
5165 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5166 return 0;
5168 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5169 if (cost_table[i] < 0)
5170 return 0;
5173 /* All interesting values are within the range of interesting
5174 ASCII characters. */
5175 return 1;
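/* Editorial note, not part of the original file: with the table built
   above, a switch over the labels 'a', '0', ' ' and '\n' is treated as
   text (costs 16, 16, 8 and 2 respectively), while any label outside
   -1 .. 127, say 200, makes estimate_case_costs return 0 so that
   balance_case_nodes falls back to equal weights.  */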
5178 /* Scan an ordered list of case nodes
5179 combining those with consecutive values or ranges.
5181 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5183 static void
5184 group_case_nodes (head)
5185 case_node_ptr head;
5187 case_node_ptr node = head;
5189 while (node)
5191 rtx lb = next_real_insn (label_rtx (node->code_label));
5192 case_node_ptr np = node;
5194 /* Try to group the successors of NODE with NODE. */
5195 while (((np = np->right) != 0)
5196 /* Do they jump to the same place? */
5197 && next_real_insn (label_rtx (np->code_label)) == lb
5198 /* Are their ranges consecutive? */
5199 && tree_int_cst_equal (np->low,
5200 fold (build (PLUS_EXPR,
5201 TREE_TYPE (node->high),
5202 node->high,
5203 integer_one_node)))
5204 /* An overflow is not consecutive. */
5205 && tree_int_cst_lt (node->high,
5206 fold (build (PLUS_EXPR,
5207 TREE_TYPE (node->high),
5208 node->high,
5209 integer_one_node))))
5211 node->high = np->high;
5213 /* NP is the first node after NODE which can't be grouped with it.
5214 Delete the nodes in between, and move on to that node. */
5215 node->right = np;
5216 node = np;
5220 /* Take an ordered list of case nodes
5221 and transform them into a near optimal binary tree,
5222 on the assumption that any target code selection value is as
5223 likely as any other.
5225 The transformation is performed by splitting the ordered
5226 list into two equal sections plus a pivot. The parts are
5227 then attached to the pivot as left and right branches. Each
5228 branch is then transformed recursively. */
5230 static void
5231 balance_case_nodes (head, parent)
5232 case_node_ptr *head;
5233 case_node_ptr parent;
5235 register case_node_ptr np;
5237 np = *head;
5238 if (np)
5240 int cost = 0;
5241 int i = 0;
5242 int ranges = 0;
5243 register case_node_ptr *npp;
5244 case_node_ptr left;
5246 /* Count the number of entries on branch. Also count the ranges. */
5248 while (np)
5250 if (!tree_int_cst_equal (np->low, np->high))
5252 ranges++;
5253 if (use_cost_table)
5254 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5257 if (use_cost_table)
5258 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5260 i++;
5261 np = np->right;
5264 if (i > 2)
5266 /* Split this list if it is long enough for that to help. */
5267 npp = head;
5268 left = *npp;
5269 if (use_cost_table)
5271 /* Find the place in the list that bisects the list's total cost;
5272 here I gets half the total cost. */
5273 int n_moved = 0;
5274 i = (cost + 1) / 2;
5275 while (1)
5277 /* Skip nodes while their cost does not reach that amount. */
5278 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5279 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5280 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5281 if (i <= 0)
5282 break;
5283 npp = &(*npp)->right;
5284 n_moved += 1;
5286 if (n_moved == 0)
5288 /* Leave this branch lopsided, but optimize left-hand
5289 side and fill in `parent' fields for right-hand side. */
5290 np = *head;
5291 np->parent = parent;
5292 balance_case_nodes (&np->left, np);
5293 for (; np->right; np = np->right)
5294 np->right->parent = np;
5295 return;
5298 /* If there are just three nodes, split at the middle one. */
5299 else if (i == 3)
5300 npp = &(*npp)->right;
5301 else
5303 /* Find the place in the list that bisects the list's total cost,
5304 where ranges count as 2.
5305 Here I gets half the total cost. */
5306 i = (i + ranges + 1) / 2;
5307 while (1)
5309 /* Skip nodes while their cost does not reach that amount. */
5310 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5311 i--;
5312 i--;
5313 if (i <= 0)
5314 break;
5315 npp = &(*npp)->right;
5318 *head = np = *npp;
5319 *npp = 0;
5320 np->parent = parent;
5321 np->left = left;
5323 /* Optimize each of the two split parts. */
5324 balance_case_nodes (&np->left, np);
5325 balance_case_nodes (&np->right, np);
5327 else
5329 /* Else leave this branch as one level,
5330 but fill in `parent' fields. */
5331 np = *head;
5332 np->parent = parent;
5333 for (; np->right; np = np->right)
5334 np->right->parent = np;
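/* Editorial note, not part of the original file: a worked example of
   the split above.  With equal weights, the ordered chain of the five
   case values 1, 3, 5, 7, 9 is divided at its middle element, which
   becomes the root:

                5
               / \
              1   7
               \   \
                3   9

   Each two-element half fails the `i > 2' test and is left as a short
   chain with only its `parent' fields filled in.  */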
5339 /* Search the parent sections of the case node tree
5340 to see if a test for the lower bound of NODE would be redundant.
5341 INDEX_TYPE is the type of the index expression.
5343 The instructions to generate the case decision tree are
5344 output in the same order as nodes are processed, so it is
5345 known that if a parent node checks the range of the current
5346 node minus one, the current node is bounded at its lower
5347 span. Thus the test would be redundant. */
5349 static int
5350 node_has_low_bound (node, index_type)
5351 case_node_ptr node;
5352 tree index_type;
5354 tree low_minus_one;
5355 case_node_ptr pnode;
5357 /* If the lower bound of this node is the lowest value in the index type,
5358 we need not test it. */
5360 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5361 return 1;
5363 /* If this node has a left branch, the value at the left must be less
5364 than that at this node, so it cannot be bounded at the bottom and
5365 we need not bother testing any further. */
5367 if (node->left)
5368 return 0;
5370 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5371 node->low, integer_one_node));
5373 /* If the subtraction above overflowed, we can't verify anything.
5374 Otherwise, look for a parent that tests our value - 1. */
5376 if (! tree_int_cst_lt (low_minus_one, node->low))
5377 return 0;
5379 for (pnode = node->parent; pnode; pnode = pnode->parent)
5380 if (tree_int_cst_equal (low_minus_one, pnode->high))
5381 return 1;
5383 return 0;
5386 /* Search the parent sections of the case node tree
5387 to see if a test for the upper bound of NODE would be redundant.
5388 INDEX_TYPE is the type of the index expression.
5390 The instructions to generate the case decision tree are
5391 output in the same order as nodes are processed, so it is
5392 known that if a parent node checks the range of the current
5393 node plus one, the current node is bounded at its upper
5394 span. Thus the test would be redundant. */
5396 static int
5397 node_has_high_bound (node, index_type)
5398 case_node_ptr node;
5399 tree index_type;
5401 tree high_plus_one;
5402 case_node_ptr pnode;
5404 /* If the upper bound of this node is the highest value in the type
5405 of the index expression, we need not test against it. */
5407 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5408 return 1;
5410 /* If this node has a right branch, the value at the right must be greater
5411 than that at this node, so it cannot be bounded at the top and
5412 we need not bother testing any further. */
5414 if (node->right)
5415 return 0;
5417 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5418 node->high, integer_one_node));
5420 /* If the addition above overflowed, we can't verify anything.
5421 Otherwise, look for a parent that tests our value + 1. */
5423 if (! tree_int_cst_lt (node->high, high_plus_one))
5424 return 0;
5426 for (pnode = node->parent; pnode; pnode = pnode->parent)
5427 if (tree_int_cst_equal (high_plus_one, pnode->low))
5428 return 1;
5430 return 0;
5433 /* Search the parent sections of the
5434 case node tree to see if both tests for the upper and lower
5435 bounds of NODE would be redundant. */
5437 static int
5438 node_is_bounded (node, index_type)
5439 case_node_ptr node;
5440 tree index_type;
5442 return (node_has_low_bound (node, index_type)
5443 && node_has_high_bound (node, index_type));
5446 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5448 static void
5449 emit_jump_if_reachable (label)
5450 rtx label;
5452 if (GET_CODE (get_last_insn ()) != BARRIER)
5453 emit_jump (label);

/* Emit step-by-step code to select a case for the value of INDEX.
   The decision tree thus generated follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinate's associated
   code for out-of-bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */
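
/* As a concrete illustration: for `switch (i)' with the single-valued
   cases 1, 2 and 3, the node for 2 at the root of the case tree, and
   neither subtree bounded, the code below emits, in outline:

        if (i == 2) goto L2;
        if (i > 2) goto T;
        if (i == 1) goto L1;
        goto default;
     T: if (i == 3) goto L3;

   after which the caller emits any jump to the default label that is
   still needed.  */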

static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_function ();
  rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
  rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
  rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
  rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
  enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
         this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                        label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
        {
          /* This node has children on both sides.
             Dispatch to one side or the other
             by comparing the index value with this node's value.
             If one subtree is bounded, check that one first,
             so we can avoid real branches in the tree.  */

          if (node_is_bounded (node->right, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);

              emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
              emit_case_nodes (index, node->left, default_label, index_type);
            }

          else if (node_is_bounded (node->left, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
              emit_case_nodes (index, node->right, default_label, index_type);
            }

          else
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              tree test_label
                = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

              /* See if the value is on the right.  */
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));

              /* Value must be on the left.
                 Handle the left-hand subtree.  */
              emit_case_nodes (index, node->left, default_label, index_type);
              /* If left-hand subtree does nothing,
                 go to default.  */
              emit_jump_if_reachable (default_label);

              /* Code branches here for the right-hand subtree.  */
              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Here we have a right child but no left so we issue a conditional
             branch to default and process the right child.

             Omit the conditional branch to default if it would guard only a
             single-valued right child; it costs too much space to save so
             little time.  */

          if (node->right->right || node->right->left
              || !tree_int_cst_equal (node->right->low, node->right->high))
            {
              if (!node_has_low_bound (node, index_type))
                {
                  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                 LT, NULL_RTX, mode, unsignedp, 0);
                  emit_jump_insn ((*gen_blt_pat) (default_label));
                }

              emit_case_nodes (index, node->right, default_label, index_type);
            }
          else
            /* We cannot process node->right normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->right explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->right->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->right->code_label), unsignedp);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
         of the condition here, but they didn't work
         and I don't understand what the idea was.  -- rms.  */
          /* If our "most probable entry" is less probable
             than the default label, emit a jump to
             the default label using condition codes
             already lying around.  With no right branch,
             a branch-greater-than will get us to the default
             label correctly.  */
          if (use_cost_table
              && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
            ;
#endif /* 0 */
          if (node->left->left || node->left->right
              || !tree_int_cst_equal (node->left->low, node->left->high))
            {
              if (!node_has_high_bound (node, index_type))
                {
                  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                 GT, NULL_RTX, mode, unsignedp, 0);
                  emit_jump_insn ((*gen_bgt_pat) (default_label));
                }

              emit_case_nodes (index, node->left, default_label, index_type);
            }
          else
            /* We cannot process node->left normally
               since we haven't ruled out the numbers greater than
               this node's value.  So handle node->left explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->left->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->left->code_label), unsignedp);
        }
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
         value, except that we do not start by testing whether this node
         is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
        {
          /* Node has subtrees on both sides.
             If the right-hand subtree is bounded,
             test for it first, since we can go straight there.
             Otherwise, we need to make a branch in the control structure,
             then handle the two subtrees.  */
          tree test_label = 0;

          emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                             VOIDmode, 0),
                         GT, NULL_RTX, mode, unsignedp, 0);

          if (node_is_bounded (node->right, index_type))
            /* Right hand node is fully bounded so we can eliminate any
               testing and branch directly to the target code.  */
            emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
          else
            {
              /* Right hand node requires testing.
                 Branch to a label where we will handle it later.  */

              test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
              emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                         GE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

          /* Handle the left-hand subtree.  */
          emit_case_nodes (index, node->left, default_label, index_type);

          /* If right node had to be handled later, do that now.  */

          if (test_label)
            {
              /* If the left-hand subtree fell through,
                 don't let it fall into the right-hand subtree.  */
              emit_jump_if_reachable (default_label);

              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Deal with values to the left of this node,
             if they are possible.  */
          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (default_label));
            }

          /* Value belongs to this node or to the right-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                             VOIDmode, 0),
                         LE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));

          emit_case_nodes (index, node->right, default_label, index_type);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Deal with values to the right of this node,
             if they are possible.  */
          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (default_label));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                         GE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

          emit_case_nodes (index, node->left, default_label, index_type);
        }

      else
        {
          /* Node has no children so we check low and high bounds to remove
             redundant tests.  Only one of the bounds can exist,
             since otherwise this node is bounded--a case tested already.  */

          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (default_label));
            }

          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (default_label));
            }

          emit_jump (label_rtx (node->code_label));
        }
    }
}

/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.  */

static tree *block_vector;

void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  /* The first block is for the function body, and does not have
     corresponding block notes.  Don't include it in the block vector.  */
  block = BLOCK_SUBBLOCKS (block);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());
}
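
/* A sketch of the intended calling sequence (the actual call sites are in
   the loop optimizer, not in this file):

        find_loop_tree_blocks ();          record the current BLOCK tree
        ... unroll loops, duplicating the block notes in the copies ...
        unroll_block_trees ();             rebuild the BLOCK tree to match

   Each copy of an unrolled loop body can then end up with its own BLOCK
   nodes, keeping the debugging information consistent.  */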