/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-5, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
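
/* For illustration (a sketch only, not part of the interface proper):
   a C if-then such as

	if (COND) STMT;

   is expanded by calls in roughly this order:

	expand_start_cond (COND, 0);	-- emit the test of COND
	... expand STMT ...
	expand_end_cond ();		-- emit the label for the false arm  */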

#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"

#include "bytecode.h"
#include "bc-typecd.h"
#include "bc-opcode.h"
#include "bc-optab.h"
#include "bc-emit.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;
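
/* For example (GNU C statement-expression; illustrative):

	x = ({ foo (); bar (); });

   Inside the braces each expression statement must compute a value,
   since the last one (here `bar ()') supplies the value of the whole
   construct.  */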

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */
extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif

/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
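
/* For example (illustrative): dense case values such as

	switch (c) { case 1: ... case 2: ... case 3: ... case 4: ... }

   are emitted as a branch table indexed by C, whereas sparse values
   such as `case 1:', `case 100:', `case 10000:' become a balanced
   tree of compare-and-jump insns built on the LEFT and RIGHT fields
   below, so any label is reached in O(log n) comparisons.  */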

struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
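
/* Illustrative sketch: while expanding

	while (a)	-- pushes one nesting object on loop_stack
	  if (b)	-- pushes another on cond_stack
	    break;

   `nesting_stack' chains both objects through their `all' fields
   (the cond innermost), while `loop_stack' and `cond_stack' each hold
   only their own kind; `break' is resolved against the innermost
   construct whose `exit_label' is non-null.  */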

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  Complemented by
	     bc_stack_level (see below) when generating bytecodes.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0 (was empty_cleanup_list),
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	  /* Bytecode specific: stack level to restore stack to on exit.  */
	  int bc_stack_level;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* For bytecodes, the case table is in-lined right in the code.
	     A label is needed for skipping over this block.  It is only
	     used when generating bytecodes.  */
	  rtx skip_label;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
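
/* Usage sketch (illustrative): an `expand_end_WHATEVER' routine
   discards its construct, and anything still nested inside it, with
   a call such as

	POPSTACK (loop_stack);

   which unwinds nesting_stack down to and including the innermost
   pending loop while keeping all the per-type stacks in step.  */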

/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
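
/* For example (GNU C; illustrative):

	{
	  int buf[n];		-- variable size: block resets stack level
	  ...
	  goto out;		-- label not seen yet: expand_goto records
	}			   a fixup instead of final code
      out: ;

   When the block is exited, the fixup is examined; `out' is defined
   by then, so the stack-restoring code can finally be inserted.  */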

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;

  /* Bytecode specific members follow */

  /* The label that this jump is jumping to, or 0 for break, continue
     or return.  */
  struct bc_label *bc_target;

  /* The label we use for the fixup patch */
  struct bc_label *label;

  /* True (non-0) if fixup has been handled */
  int bc_handled:1;

  /* Like stack_level above, except refers to the interpreter stack */
  int bc_stack_level;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};

static void expand_goto_internal	PROTO((tree, rtx, rtx));
static void bc_expand_goto_internal	PROTO((enum bytecode_opcode,
					       struct bc_label *, tree));
static int expand_fixup			PROTO((tree, rtx, rtx));
static void bc_expand_fixup		PROTO((enum bytecode_opcode,
					       struct bc_label *, int));
static void fixup_gotos			PROTO((struct nesting *, rtx, tree,
					       rtx, int));
static void bc_fixup_gotos		PROTO((struct nesting *, int, tree,
					       rtx, int));
static void bc_expand_start_cond	PROTO((tree, int));
static void bc_expand_end_cond		PROTO((void));
static void bc_expand_start_else	PROTO((void));
static void bc_expand_end_loop		PROTO((void));
static void bc_expand_end_bindings	PROTO((tree, int, int));
static void bc_expand_decl		PROTO((tree, tree));
static void bc_expand_variable_local_init PROTO((tree));
static void bc_expand_decl_init		PROTO((tree));
static void expand_null_return_1	PROTO((rtx, int));
static void expand_value_return		PROTO((rtx));
static int tail_recursion_args		PROTO((tree, tree));
static void expand_cleanups		PROTO((tree, tree, int, int));
static void bc_expand_start_case	PROTO((struct nesting *, tree,
					       tree, char *));
static int bc_pushcase			PROTO((tree, tree));
static void bc_check_for_full_enumeration_handling PROTO((tree));
static void bc_expand_end_case		PROTO((tree));
static void do_jump_if_equal		PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs		PROTO((case_node_ptr));
static void group_case_nodes		PROTO((case_node_ptr));
static void balance_case_nodes		PROTO((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PROTO((case_node_ptr, tree));
static int node_has_high_bound		PROTO((case_node_ptr, tree));
static int node_is_bounded		PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable	PROTO((rtx));
static void emit_case_nodes		PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node		PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list PROTO((case_node *, case_node *));

extern rtx bc_allocate_local ();
extern rtx bc_allocate_variable_array ();

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
  init_eh ();
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;

  init_eh_for_function ();
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
  save_eh_status (p);
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
  restore_eh_status (p);
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  if (!output_bytecode)
    {
      last_insn = get_last_insn ();
      if (!optimize
	  && (GET_CODE (last_insn) == CODE_LABEL
	      || (GET_CODE (last_insn) == NOTE
		  && prev_real_insn (last_insn) == 0)))
	emit_insn (gen_nop ());
    }
}

/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  if (output_bytecode)
    {
      bc_expand_expr (exp);
      bc_emit_instruction (jumpP);
    }
  else
    {
      rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
      x = convert_memory_address (Pmode, x);
#endif

      emit_queue ();
      do_pending_stack_adjust ();
      emit_indirect_jump (x);
    }
}

/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  if (output_bytecode)
    {
      if (! DECL_RTL (label))
	DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
      if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
	error ("multiply defined label");
      return;
    }

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}
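
/* For example (GNU C nested functions; illustrative):

	void f ()
	{
	  __label__ done;
	  void inner () { goto done; }	-- nonlocal goto
	  ...
	  done: ;
	}

   The front end calls declare_nonlocal_label for `done'; the
   `goto done' inside `inner' then takes the nonlocal path through
   expand_goto below.  */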

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  if (output_bytecode)
    {
      expand_goto_internal (label, label_rtx (label), NULL_RTX);
      return;
    }

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with hard_frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx, label_ref);
	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  /* NOTICE!  If a bytecode instruction other than `jump' is needed,
     then the caller has to call bc_expand_goto_internal()
     directly.  This is rather an exceptional case, and there aren't
     that many places where this is necessary.  */
  if (output_bytecode)
    {
      bc_expand_goto_internal (jump, BYTECODE_BC_LABEL (label), body);
      return;
    }

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}

/* Generate a jump with OPCODE to the given bytecode LABEL which is
   found within BODY.  */

static void
bc_expand_goto_internal (opcode, label, body)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     tree body;
{
  struct nesting *block;
  int stack_level = -1;

  /* If the label is defined, adjust the stack as necessary.
     If it's not defined, we have to push the reference on the
     fixup list.  */

  if (label->defined)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing bytecode uids.)  Then restore the
	 outermost stack level within that block.  */

      for (block = block_stack; block; block = block->next)
	{
	  if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)
	    break;
	  if (block->data.block.bc_stack_level)
	    stack_level = block->data.block.bc_stack_level;

	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      /* Restore the stack level.  If we need to adjust the stack, we
	 must do so after the jump, since the jump may depend on
	 what's on the stack.  Thus, any stack-modifying conditional
	 jumps (these are the only ones that rely on what's on the
	 stack) go into the fixup list.  */

      if (stack_level >= 0
	  && stack_depth != stack_level
	  && opcode != jump)
	bc_expand_fixup (opcode, label, stack_level);
      else
	{
	  if (stack_level >= 0)
	    bc_adjust_stack (stack_depth - stack_level);

	  if (body && DECL_BIT_FIELD (body))
	    error ("jump to `%s' invalidly jumps into binding contour",
		   IDENTIFIER_POINTER (DECL_NAME (body)));

	  /* Emit immediate jump */
	  bc_emit_bytecode (opcode);
	  bc_emit_bytecode_labelref (label);

#ifdef DEBUG_PRINT_CODE
	  fputc ('\n', stderr);
#endif
	}
    }
  else
    /* Put goto in the fixup list */
    bc_expand_fixup (opcode, label, stack_level);
}

/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	start_sequence ();
	pushlevel (0);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now! */
	end_sequence ();
	emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
   Make the fixup restore the stack level to STACK_LEVEL.  */

static void
bc_expand_fixup (opcode, label, stack_level)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     int stack_level;
{
  struct goto_fixup *fixup
    = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));

  fixup->label = bc_get_bytecode_label ();
  fixup->bc_target = label;
  fixup->bc_stack_level = stack_level;
  fixup->bc_handled = FALSE;

  fixup->next = goto_fixup_chain;
  goto_fixup_chain = fixup;

  /* Insert a jump to the fixup code */
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (fixup->label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  if (output_bytecode)
    {
      /* ??? The second arg is the bc stack level, which is not the same
	 as STACK_LEVEL.  I have no idea what should go here, so I'll
	 just pass 0.  */
      bc_fixup_gotos (thisblock, 0, cleanup_list, first_insn, dont_jump_in);
      return;
    }

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   Note: STACK_DEPTH is not altered.

   The arguments are currently not used in the bytecode compiler, but we may
   need them one day for languages other than C.

   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     int stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;
  int saved_stack_depth;

  /* F is the fixup we are considering; PREV is the previous one.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev)
	    prev->next = f->next;
	}

      /* Emit code to restore the stack and continue */
      bc_emit_bytecode_labeldef (f->label);

      /* Save stack_depth across call, since bc_adjust_stack () will alter
	 the perceived stack depth via the instructions generated.  */

      if (f->bc_stack_level >= 0)
	{
	  saved_stack_depth = stack_depth;
	  bc_adjust_stack (stack_depth - f->bc_stack_level);
	  stack_depth = saved_stack_depth;
	}

      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (f->bc_target);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
    }

  goto_fixup_chain = NULL;
}

/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */
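
/* For example (illustrative): a statement such as

	asm ("nop");

   arrives here with BODY being the STRING_CST "nop" (possibly wrapped
   in an ADDR_EXPR) and is emitted as a bare ASM_INPUT rtx.  */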

void
expand_asm (body)
     tree body;
{
  if (output_bytecode)
    {
      error ("`asm' is invalid when generating bytecode");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
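
/* For example (GNU C extended asm; illustrative):

	asm volatile ("add %1,%0" : "=r" (out) : "g" (in), "0" (out));

   reaches here with one element in OUTPUTS, whose TREE_PURPOSE is the
   constraint string "=r" and whose TREE_VALUE is the lvalue `out';
   INPUTS holds `in' under constraint "g" plus the matching-operand
   pair "0"/`out'; VOL is nonzero because of `volatile'.  */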

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;

  if (output_bytecode)
    {
      error ("`asm' is invalid when generating bytecode");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      tree val1;
      int j;
      int found_equal = 0;
      int found_plus = 0;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
	 if it allows any register.  Be liberal on the latter test, since
	 the worst that happens if we get it wrong is we issue an error
	 message.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
	switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
	  {
	  case '+':
	    /* Make sure we can specify the matching operand.  */
	    if (i > 9)
	      {
		error ("output operand constraint %d contains `+'", i);
		return;
	      }

	    /* Replace '+' with '='.  */
	    TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] = '=';
	    found_plus = 1;
	    break;

	  case '=':
	    found_equal = 1;
	    break;

	  case '?':  case '!':  case '*':  case '%':  case '&':
	  case 'V':  case 'm':  case 'o':  case '<':  case '>':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    error ("matching constraint not valid in output operand");
	    break;

	  case 'p':  case 'g':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      if (! found_equal && ! found_plus)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      if (TREE_CODE (val) == INDIRECT_REF
	  || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || found_plus)
	{
	  if (! allows_reg)
	    mark_addressable (TREE_VALUE (tail));

	  output_rtx[i]
	    = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);
	}
      else
	{
	  output_rtx[i] = assign_temp (type, 0, 0, 0);
	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
	}

      if (found_plus)
	{
	  inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
	  inout_opnum[ninout++] = i;
	}
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)) );
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
	switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
	  {
	  case '+':   case '=':
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;

	  case '?':  case '!':  case '*':  case '%':  case '&':
	  case 'V':  case 'm':  case 'o':  case '<':  case '>':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	    /* Whether or not a numeric constraint allows a register is
	       decided by the matching constraint, and so there is no need
	       to do anything special with them.  We must handle them in
	       the default case, so that we don't unnecessarily force
	       operands to memory.  */
	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]
		>= '0' + noutputs)
	      error ("matching constraint references invalid operand number");

	    /* ... fall through ... */

	  case 'p':  case 'g':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      if (! allows_reg)
	mark_addressable (TREE_VALUE (tail));

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      if (CONSTANT_P (XVECEXP (body, 3, i))
	  && ! general_operand (XVECEXP (body, 3, i),
				TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)))))
	{
	  if (allows_reg)
	    XVECEXP (body, 3, i)
	      = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			   XVECEXP (body, 3, i));
	  else
	    XVECEXP (body, 3, i)
	      = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
				 XVECEXP (body, 3, i));
	}

      if (! allows_reg
	  && (GET_CODE (XVECEXP (body, 3, i)) == REG
	      || GET_CODE (XVECEXP (body, 3, i)) == SUBREG
	      || GET_CODE (XVECEXP (body, 3, i)) == CONCAT))
	{
	  tree type = TREE_TYPE (TREE_VALUE (tail));
	  rtx memloc = assign_temp (type, 1, 1, 1);

	  emit_move_insn (memloc, XVECEXP (body, 3, i));
	  XVECEXP (body, 3, i) = memloc;
	}

      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs - ninout; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      static char match[9+1][2]
	= {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
      int j = inout_opnum[i];

      XVECEXP (body, 3, ninputs - ninout + i)      /* argvec */
	= output_rtx[j];
      XVECEXP (body, 4, ninputs - ninout + i)      /* constraints */
	= gen_rtx (ASM_INPUT, inout_mode[j], match[j]);
    }

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx (CLOBBER, VOIDmode,
			       gen_rtx (MEM, BLKmode,
					gen_rtx (SCRATCH, VOIDmode, 0)));
		  continue;
		}

	      /* Ignore unknown register, error already signalled.  */
	      continue;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
1723 /* Generate RTL to evaluate the expression EXP
1724 and remember it in case this is the VALUE in a ({... VALUE; }) constr. */
1726 void
1727 expand_expr_stmt (exp)
1728 tree exp;
1730 if (output_bytecode)
1732 int org_stack_depth = stack_depth;
1734 bc_expand_expr (exp);
1736 /* Restore stack depth */
1737 if (stack_depth < org_stack_depth)
1738 abort ();
1740 bc_emit_instruction (drop);
1742 last_expr_type = TREE_TYPE (exp);
1743 return;
1746 /* If -W, warn about statements with no side effects,
1747 except for an explicit cast to void (e.g. for assert()), and
1748 except inside a ({...}) where they may be useful. */
1749 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1751 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1752 && !(TREE_CODE (exp) == CONVERT_EXPR
1753 && TREE_TYPE (exp) == void_type_node))
1754 warning_with_file_and_line (emit_filename, emit_lineno,
1755 "statement with no effect");
1756 else if (warn_unused)
1757 warn_if_unused_value (exp);
1760 /* If EXP is of function type and we are expanding statements for
1761 value, convert it to pointer-to-function. */
1762 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1763 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1765 last_expr_type = TREE_TYPE (exp);
1766 if (! flag_syntax_only)
1767 last_expr_value = expand_expr (exp,
1768 (expr_stmts_for_value
1769 ? NULL_RTX : const0_rtx),
1770 VOIDmode, 0);
1772 /* If all we do is reference a volatile value in memory,
1773 copy it to a register to be sure it is actually touched. */
1774 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1775 && TREE_THIS_VOLATILE (exp))
1777 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1778 ;
1779 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1780 copy_to_reg (last_expr_value);
1781 else
1783 rtx lab = gen_label_rtx ();
1785 /* Compare the value with itself to reference it. */
1786 emit_cmp_insn (last_expr_value, last_expr_value, EQ,
1787 expand_expr (TYPE_SIZE (last_expr_type),
1788 NULL_RTX, VOIDmode, 0),
1789 BLKmode, 0,
1790 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
1791 emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
1792 emit_label (lab);
1796 /* If this expression is part of a ({...}) and is in memory, we may have
1797 to preserve temporaries. */
1798 preserve_temp_slots (last_expr_value);
1800 /* Free any temporaries used to evaluate this expression. Any temporary
1801 used as a result of this expression will already have been preserved
1802 above. */
1803 free_temp_slots ();
1805 emit_queue ();
1808 /* Warn if EXP contains any computations whose results are not used.
1809 Return 1 if a warning is printed; 0 otherwise. */
1811 int
1812 warn_if_unused_value (exp)
1813 tree exp;
1815 if (TREE_USED (exp))
1816 return 0;
1818 switch (TREE_CODE (exp))
1820 case PREINCREMENT_EXPR:
1821 case POSTINCREMENT_EXPR:
1822 case PREDECREMENT_EXPR:
1823 case POSTDECREMENT_EXPR:
1824 case MODIFY_EXPR:
1825 case INIT_EXPR:
1826 case TARGET_EXPR:
1827 case CALL_EXPR:
1828 case METHOD_CALL_EXPR:
1829 case RTL_EXPR:
1830 case WITH_CLEANUP_EXPR:
1831 case EXIT_EXPR:
1832 /* We don't warn about COND_EXPR because it may be a useful
1833 construct if either arm contains a side effect. */
1834 case COND_EXPR:
1835 return 0;
1837 case BIND_EXPR:
1838 /* For a binding, warn if no side effect within it. */
1839 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1841 case SAVE_EXPR:
1842 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1844 case TRUTH_ORIF_EXPR:
1845 case TRUTH_ANDIF_EXPR:
1846 /* In && or ||, warn if 2nd operand has no side effect. */
1847 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1849 case COMPOUND_EXPR:
1850 if (TREE_NO_UNUSED_WARNING (exp))
1851 return 0;
1852 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1853 return 1;
1854 /* Let people do `(foo (), 0)' without a warning. */
1855 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1856 return 0;
1857 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1859 case NOP_EXPR:
1860 case CONVERT_EXPR:
1861 case NON_LVALUE_EXPR:
1862 /* Don't warn about values cast to void. */
1863 if (TREE_TYPE (exp) == void_type_node)
1864 return 0;
1865 /* Don't warn about conversions not explicit in the user's program. */
1866 if (TREE_NO_UNUSED_WARNING (exp))
1867 return 0;
1868 /* Assignment to a cast usually results in a cast of a modify.
1869 Don't complain about that. There can be an arbitrary number of
1870 casts before the modify, so we must loop until we find the first
1871 non-cast expression and then test to see if that is a modify. */
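/* For example, a statement such as `(short) x = 5;' (using the old
   lvalue-cast extension) is represented as NOP_EXPRs around a
   MODIFY_EXPR and must not draw a warning.  */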
1873 tree tem = TREE_OPERAND (exp, 0);
1875 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1876 tem = TREE_OPERAND (tem, 0);
1878 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1879 || TREE_CODE (tem) == CALL_EXPR)
1880 return 0;
1882 goto warn;
1884 case INDIRECT_REF:
1885 /* Don't warn about automatic dereferencing of references, since
1886 the user cannot control it. */
1887 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1888 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1889 /* ... fall through ... */
1891 default:
1892 /* Referencing a volatile value is a side effect, so don't warn. */
1893 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1894 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1895 && TREE_THIS_VOLATILE (exp))
1896 return 0;
1897 warn:
1898 warning_with_file_and_line (emit_filename, emit_lineno,
1899 "value computed is not used");
1900 return 1;
1904 /* Clear out the memory of the last expression evaluated. */
1906 void
1907 clear_last_expr ()
1909 last_expr_type = 0;
1912 /* Begin a statement which will return a value.
1913 Return the RTL_EXPR for this statement expr.
1914 The caller must save that value and pass it to expand_end_stmt_expr. */
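/* Sketch of the expected calling protocol (driven by the front end,
   not by this file):

       tree t = expand_start_stmt_expr ();
       ... call expand_expr_stmt for each statement in the ({...}) ...
       t = expand_end_stmt_expr (t);

   The resulting RTL_EXPR carries both the generated insns and the
   value of the last statement.  */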
1916 tree
1917 expand_start_stmt_expr ()
1919 int momentary;
1920 tree t;
1922 /* When generating bytecode just note down the stack depth */
1923 if (output_bytecode)
1924 return (build_int_2 (stack_depth, 0));
1926 /* Make the RTL_EXPR node temporary, not momentary,
1927 so that rtl_expr_chain doesn't become garbage. */
1928 momentary = suspend_momentary ();
1929 t = make_node (RTL_EXPR);
1930 resume_momentary (momentary);
1931 do_pending_stack_adjust ();
1932 start_sequence_for_rtl_expr (t);
1933 NO_DEFER_POP;
1934 expr_stmts_for_value++;
1935 return t;
1938 /* Restore the previous state at the end of a statement that returns a value.
1939 Returns a tree node representing the statement's value and the
1940 insns to compute the value.
1942 The nodes of that expression have been freed by now, so we cannot use them.
1943 But we don't want to do that anyway; the expression has already been
1944 evaluated and now we just want to use the value. So generate an RTL_EXPR
1945 with the proper type and RTL value.
1947 If the last substatement was not an expression,
1948 return something with type `void'. */
1950 tree
1951 expand_end_stmt_expr (t)
1952 tree t;
1954 if (output_bytecode)
1956 int i;
1957 tree t;
1960 /* At this point, all expressions have been evaluated in order.
1961 However, all expression values have been popped when evaluated,
1962 which means we have to recover the last expression value. This is
1963 the last value removed by means of a `drop' instruction. Instead
1964 of adding code to inhibit dropping the last expression value, it
1965 is here recovered by undoing the `drop'. Since `drop' is
1966 equivalent to `adjstackSI [1]', it can be undone with `adjstackSI
1967 [-1]'. */
1969 bc_adjust_stack (-1);
1971 if (!last_expr_type)
1972 last_expr_type = void_type_node;
1974 t = make_node (RTL_EXPR);
1975 TREE_TYPE (t) = last_expr_type;
1976 RTL_EXPR_RTL (t) = NULL;
1977 RTL_EXPR_SEQUENCE (t) = NULL;
1979 /* Don't consider deleting this expr or containing exprs at tree level. */
1980 TREE_THIS_VOLATILE (t) = 1;
1982 last_expr_type = 0;
1983 return t;
1986 OK_DEFER_POP;
1988 if (last_expr_type == 0)
1990 last_expr_type = void_type_node;
1991 last_expr_value = const0_rtx;
1993 else if (last_expr_value == 0)
1994 /* There are some cases where this can happen, such as when the
1995 statement is void type. */
1996 last_expr_value = const0_rtx;
1997 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1998 /* Remove any possible QUEUED. */
1999 last_expr_value = protect_from_queue (last_expr_value, 0);
2001 emit_queue ();
2003 TREE_TYPE (t) = last_expr_type;
2004 RTL_EXPR_RTL (t) = last_expr_value;
2005 RTL_EXPR_SEQUENCE (t) = get_insns ();
2007 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2009 end_sequence ();
2011 /* Don't consider deleting this expr or containing exprs at tree level. */
2012 TREE_SIDE_EFFECTS (t) = 1;
2013 /* Propagate volatility of the actual RTL expr. */
2014 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2016 last_expr_type = 0;
2017 expr_stmts_for_value--;
2019 return t;
2022 /* Generate RTL for the start of an if-then. COND is the expression
2023 whose truth should be tested.
2025 If EXITFLAG is nonzero, this conditional is visible to
2026 `exit_something'. */
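/* Sketch of the protocol a front end might use for
   `if (C) A; else if (C2) B; else D;':

       expand_start_cond (C, 0);
       ... expand A ...
       expand_start_elseif (C2);
       ... expand B ...
       expand_start_else ();
       ... expand D ...
       expand_end_cond ();  */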
2028 void
2029 expand_start_cond (cond, exitflag)
2030 tree cond;
2031 int exitflag;
2033 struct nesting *thiscond = ALLOC_NESTING ();
2035 /* Make an entry on cond_stack for the cond we are entering. */
2037 thiscond->next = cond_stack;
2038 thiscond->all = nesting_stack;
2039 thiscond->depth = ++nesting_depth;
2040 thiscond->data.cond.next_label = gen_label_rtx ();
2041 /* Before we encounter an `else', we don't need a separate exit label
2042 unless there are supposed to be exit statements
2043 to exit this conditional. */
2044 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2045 thiscond->data.cond.endif_label = thiscond->exit_label;
2046 cond_stack = thiscond;
2047 nesting_stack = thiscond;
2049 if (output_bytecode)
2050 bc_expand_start_cond (cond, exitflag);
2051 else
2052 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2055 /* Generate RTL between then-clause and the elseif-clause
2056 of an if-then-elseif-.... */
2058 void
2059 expand_start_elseif (cond)
2060 tree cond;
2062 if (cond_stack->data.cond.endif_label == 0)
2063 cond_stack->data.cond.endif_label = gen_label_rtx ();
2064 emit_jump (cond_stack->data.cond.endif_label);
2065 emit_label (cond_stack->data.cond.next_label);
2066 cond_stack->data.cond.next_label = gen_label_rtx ();
2067 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2070 /* Generate RTL between the then-clause and the else-clause
2071 of an if-then-else. */
2073 void
2074 expand_start_else ()
2076 if (cond_stack->data.cond.endif_label == 0)
2077 cond_stack->data.cond.endif_label = gen_label_rtx ();
2079 if (output_bytecode)
2081 bc_expand_start_else ();
2082 return;
2085 emit_jump (cond_stack->data.cond.endif_label);
2086 emit_label (cond_stack->data.cond.next_label);
2087 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2090 /* After calling expand_start_else, turn this "else" into an "else if"
2091 by providing another condition. */
2093 void
2094 expand_elseif (cond)
2095 tree cond;
2097 cond_stack->data.cond.next_label = gen_label_rtx ();
2098 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2101 /* Generate RTL for the end of an if-then.
2102 Pop the record for it off of cond_stack. */
2104 void
2105 expand_end_cond ()
2107 struct nesting *thiscond = cond_stack;
2109 if (output_bytecode)
2110 bc_expand_end_cond ();
2111 else
2113 do_pending_stack_adjust ();
2114 if (thiscond->data.cond.next_label)
2115 emit_label (thiscond->data.cond.next_label);
2116 if (thiscond->data.cond.endif_label)
2117 emit_label (thiscond->data.cond.endif_label);
2120 POPSTACK (cond_stack);
2121 last_expr_type = 0;
2125 /* Generate code for the start of an if-then. COND is the expression
2126 whose truth is to be tested; if EXITFLAG is nonzero this conditional
2127 is to be visible to exit_something. It is assumed that the caller
2128 has pushed the previous context on the cond stack. */
2130 static void
2131 bc_expand_start_cond (cond, exitflag)
2132 tree cond;
2133 int exitflag;
2135 struct nesting *thiscond = cond_stack;
2137 thiscond->data.case_stmt.nominal_type = cond;
2138 if (! exitflag)
2139 thiscond->exit_label = gen_label_rtx ();
2140 bc_expand_expr (cond);
2141 bc_emit_bytecode (xjumpifnot);
2142 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2144 #ifdef DEBUG_PRINT_CODE
2145 fputc ('\n', stderr);
2146 #endif
2149 /* Generate the label for the end of an if with
2150 no else clause. */
2152 static void
2153 bc_expand_end_cond ()
2155 struct nesting *thiscond = cond_stack;
2157 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
2160 /* Generate code for the start of the else clause of
2161 an if-then-else. */
2163 static void
2164 bc_expand_start_else ()
2166 struct nesting *thiscond = cond_stack;
2168 thiscond->data.cond.endif_label = thiscond->exit_label;
2169 thiscond->exit_label = gen_label_rtx ();
2170 bc_emit_bytecode (jump);
2171 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2173 #ifdef DEBUG_PRINT_CODE
2174 fputc ('\n', stderr);
2175 #endif
2177 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
2180 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2181 loop should be exited by `exit_something'. This is a loop for which
2182 `expand_continue' will jump to the top of the loop.
2184 Make an entry on loop_stack to record the labels associated with
2185 this loop. */
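/* Sketch of how a front end might expand `while (C) BODY;' with the
   loop functions below:

       expand_start_loop (1);
       expand_exit_loop_if_false (0, C);
       ... expand BODY ...
       expand_end_loop ();  */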
2187 struct nesting *
2188 expand_start_loop (exit_flag)
2189 int exit_flag;
2191 register struct nesting *thisloop = ALLOC_NESTING ();
2193 /* Make an entry on loop_stack for the loop we are entering. */
2195 thisloop->next = loop_stack;
2196 thisloop->all = nesting_stack;
2197 thisloop->depth = ++nesting_depth;
2198 thisloop->data.loop.start_label = gen_label_rtx ();
2199 thisloop->data.loop.end_label = gen_label_rtx ();
2200 thisloop->data.loop.alt_end_label = 0;
2201 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2202 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2203 loop_stack = thisloop;
2204 nesting_stack = thisloop;
2206 if (output_bytecode)
2208 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2209 return thisloop;
2212 do_pending_stack_adjust ();
2213 emit_queue ();
2214 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2215 emit_label (thisloop->data.loop.start_label);
2217 return thisloop;
2220 /* Like expand_start_loop but for a loop where the continuation point
2221 (for expand_continue_loop) will be specified explicitly. */
2223 struct nesting *
2224 expand_start_loop_continue_elsewhere (exit_flag)
2225 int exit_flag;
2227 struct nesting *thisloop = expand_start_loop (exit_flag);
2228 loop_stack->data.loop.continue_label = gen_label_rtx ();
2229 return thisloop;
2232 /* Specify the continuation point for a loop started with
2233 expand_start_loop_continue_elsewhere.
2234 Use this at the point in the code to which a continue statement
2235 should jump. */
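/* For `for (INIT; COND; INCR) BODY;' a front end might instead do:

       ... expand INIT ...
       expand_start_loop_continue_elsewhere (1);
       expand_exit_loop_if_false (0, COND);
       ... expand BODY ...
       expand_loop_continue_here ();
       ... expand INCR ...
       expand_end_loop ();

   so that `continue' jumps to INCR rather than to the loop top.  */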
2237 void
2238 expand_loop_continue_here ()
2240 if (output_bytecode)
2242 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
2243 return;
2245 do_pending_stack_adjust ();
2246 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2247 emit_label (loop_stack->data.loop.continue_label);
2250 /* End a loop. */
2252 static void
2253 bc_expand_end_loop ()
2255 struct nesting *thisloop = loop_stack;
2257 bc_emit_bytecode (jump);
2258 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2260 #ifdef DEBUG_PRINT_CODE
2261 fputc ('\n', stderr);
2262 #endif
2264 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
2265 POPSTACK (loop_stack);
2266 last_expr_type = 0;
2270 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2271 Pop the block off of loop_stack. */
2273 void
2274 expand_end_loop ()
2276 register rtx insn;
2277 register rtx start_label;
2278 rtx last_test_insn = 0;
2279 int num_insns = 0;
2281 if (output_bytecode)
2283 bc_expand_end_loop ();
2284 return;
2287 insn = get_last_insn ();
2288 start_label = loop_stack->data.loop.start_label;
2290 /* Mark the continue-point at the top of the loop if none elsewhere. */
2291 if (start_label == loop_stack->data.loop.continue_label)
2292 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2294 do_pending_stack_adjust ();
2296 /* If optimizing, perhaps reorder the loop. If the loop
2297 starts with a conditional exit, roll that to the end
2298 where it will optimize together with the jump back.
2300 We look for the last conditional branch to the exit that we encounter
2301 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2302 branch to the exit first, use it.
2304 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2305 because moving them is not valid. */
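/* Schematically, a loop emitted as

       start:  if (! COND) goto end;  BODY;  goto start;  end:

   is rearranged into

       goto start;  top:  BODY;
       start:  if (! COND) goto end;  goto top;  end:

   so that jump optimization can later fuse the exit test with the jump
   back into a single `if (COND) goto top;'.  */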
2307 if (optimize
2308 &&
2309 ! (GET_CODE (insn) == JUMP_INSN
2310 && GET_CODE (PATTERN (insn)) == SET
2311 && SET_DEST (PATTERN (insn)) == pc_rtx
2312 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2314 /* Scan insns from the top of the loop looking for a qualified
2315 conditional exit. */
2316 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2317 insn = NEXT_INSN (insn))
2319 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2320 break;
2322 if (GET_CODE (insn) == NOTE
2323 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2324 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2325 break;
2327 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2328 num_insns++;
2330 if (last_test_insn && num_insns > 30)
2331 break;
2333 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2334 && SET_DEST (PATTERN (insn)) == pc_rtx
2335 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2336 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2337 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2338 == loop_stack->data.loop.end_label)
2339 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2340 == loop_stack->data.loop.alt_end_label)))
2341 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2342 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2343 == loop_stack->data.loop.end_label)
2344 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2345 == loop_stack->data.loop.alt_end_label)))))
2346 last_test_insn = insn;
2348 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2349 && GET_CODE (PATTERN (insn)) == SET
2350 && SET_DEST (PATTERN (insn)) == pc_rtx
2351 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2352 && ((XEXP (SET_SRC (PATTERN (insn)), 0)
2353 == loop_stack->data.loop.end_label)
2354 || (XEXP (SET_SRC (PATTERN (insn)), 0)
2355 == loop_stack->data.loop.alt_end_label)))
2356 /* Include BARRIER. */
2357 last_test_insn = NEXT_INSN (insn);
2360 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2362 /* We found one. Move everything from there up
2363 to the end of the loop, and add a jump into the loop
2364 to jump to there. */
2365 register rtx newstart_label = gen_label_rtx ();
2366 register rtx start_move = start_label;
2368 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2369 then we want to move this note also. */
2370 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2371 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2372 == NOTE_INSN_LOOP_CONT))
2373 start_move = PREV_INSN (start_move);
2375 emit_label_after (newstart_label, PREV_INSN (start_move));
2376 reorder_insns (start_move, last_test_insn, get_last_insn ());
2377 emit_jump_insn_after (gen_jump (start_label),
2378 PREV_INSN (newstart_label));
2379 emit_barrier_after (PREV_INSN (newstart_label));
2380 start_label = newstart_label;
2384 emit_jump (start_label);
2385 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2386 emit_label (loop_stack->data.loop.end_label);
2388 POPSTACK (loop_stack);
2390 last_expr_type = 0;
2393 /* Generate a jump to the current loop's continue-point.
2394 This is usually the top of the loop, but may be specified
2395 explicitly elsewhere. If not currently inside a loop,
2396 return 0 and do nothing; caller will print an error message. */
2398 int
2399 expand_continue_loop (whichloop)
2400 struct nesting *whichloop;
2402 last_expr_type = 0;
2403 if (whichloop == 0)
2404 whichloop = loop_stack;
2405 if (whichloop == 0)
2406 return 0;
2407 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2408 NULL_RTX);
2409 return 1;
2412 /* Generate a jump to exit the current loop. If not currently inside a loop,
2413 return 0 and do nothing; caller will print an error message. */
2415 int
2416 expand_exit_loop (whichloop)
2417 struct nesting *whichloop;
2419 last_expr_type = 0;
2420 if (whichloop == 0)
2421 whichloop = loop_stack;
2422 if (whichloop == 0)
2423 return 0;
2424 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2425 return 1;
2428 /* Generate a conditional jump to exit the current loop if COND
2429 evaluates to zero. If not currently inside a loop,
2430 return 0 and do nothing; caller will print an error message. */
2432 int
2433 expand_exit_loop_if_false (whichloop, cond)
2434 struct nesting *whichloop;
2435 tree cond;
2437 last_expr_type = 0;
2438 if (whichloop == 0)
2439 whichloop = loop_stack;
2440 if (whichloop == 0)
2441 return 0;
2442 if (output_bytecode)
2444 bc_expand_expr (cond);
2445 bc_expand_goto_internal (xjumpifnot,
2446 BYTECODE_BC_LABEL (whichloop->exit_label),
2447 NULL_TREE);
2449 else
2451 /* In order to handle fixups, we actually create a conditional jump
2452 around an unconditional branch to exit the loop. If fixups are
2453 necessary, they go before the unconditional branch. */
2455 rtx label = gen_label_rtx ();
2456 rtx last_insn;
2458 do_jump (cond, NULL_RTX, label);
2459 last_insn = get_last_insn ();
2460 if (GET_CODE (last_insn) == CODE_LABEL)
2461 whichloop->data.loop.alt_end_label = last_insn;
2462 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2463 NULL_RTX);
2464 emit_label (label);
2467 return 1;
2470 /* Return non-zero if we should preserve sub-expressions as separate
2471 pseudos. We never do so if we aren't optimizing. We always do so
2472 if -fexpensive-optimizations.
2474 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2475 the loop may still be a small one. */
2477 int
2478 preserve_subexpressions_p ()
2480 rtx insn;
2482 if (flag_expensive_optimizations)
2483 return 1;
2485 if (optimize == 0 || loop_stack == 0)
2486 return 0;
2488 insn = get_last_insn_anywhere ();
2490 return (insn
2491 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2492 < n_non_fixed_regs * 3));
2496 /* Generate a jump to exit the current loop, conditional, binding contour
2497 or case statement. Not all such constructs are visible to this function,
2498 only those started with EXIT_FLAG nonzero. Individual languages use
2499 the EXIT_FLAG parameter to control which kinds of constructs you can
2500 exit this way.
2502 If not currently inside anything that can be exited,
2503 return 0 and do nothing; caller will print an error message. */
2505 int
2506 expand_exit_something ()
2508 struct nesting *n;
2509 last_expr_type = 0;
2510 for (n = nesting_stack; n; n = n->all)
2511 if (n->exit_label != 0)
2513 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2514 return 1;
2517 return 0;
2520 /* Generate RTL to return from the current function, with no value.
2521 (That is, we do not do anything about returning any value.) */
2523 void
2524 expand_null_return ()
2526 struct nesting *block = block_stack;
2527 rtx last_insn = 0;
2529 if (output_bytecode)
2531 bc_emit_instruction (ret);
2532 return;
2535 /* Does any pending block have cleanups? */
2537 while (block && block->data.block.cleanups == 0)
2538 block = block->next;
2540 /* If yes, use a goto to return, since that runs cleanups. */
2542 expand_null_return_1 (last_insn, block != 0);
2545 /* Generate RTL to return from the current function, with value VAL. */
2547 static void
2548 expand_value_return (val)
2549 rtx val;
2551 struct nesting *block = block_stack;
2552 rtx last_insn = get_last_insn ();
2553 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2555 /* Copy the value to the return location
2556 unless it's already there. */
2558 if (return_reg != val)
2560 #ifdef PROMOTE_FUNCTION_RETURN
2561 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2562 int unsignedp = TREE_UNSIGNED (type);
2563 enum machine_mode mode
2564 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2565 &unsignedp, 1);
2567 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2568 convert_move (return_reg, val, unsignedp);
2569 else
2570 #endif
2571 emit_move_insn (return_reg, val);
2573 if (GET_CODE (return_reg) == REG
2574 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2575 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2576 /* Handle calls that return values in multiple non-contiguous locations.
2577 The Irix 6 ABI has examples of this. */
2578 else if (GET_CODE (return_reg) == PARALLEL)
2580 int i;
2582 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2584 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2586 if (GET_CODE (x) == REG
2587 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2588 emit_insn (gen_rtx (USE, VOIDmode, x));
2592 /* Does any pending block have cleanups? */
2594 while (block && block->data.block.cleanups == 0)
2595 block = block->next;
2597 /* If yes, use a goto to return, since that runs cleanups.
2598 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2600 expand_null_return_1 (last_insn, block != 0);
2603 /* Output a return with no value. If LAST_INSN is nonzero,
2604 pretend that the return takes place after LAST_INSN.
2605 If USE_GOTO is nonzero then don't use a return instruction;
2606 go to the return label instead. This causes any cleanups
2607 of pending blocks to be executed normally. */
2609 static void
2610 expand_null_return_1 (last_insn, use_goto)
2611 rtx last_insn;
2612 int use_goto;
2614 rtx end_label = cleanup_label ? cleanup_label : return_label;
2616 clear_pending_stack_adjust ();
2617 do_pending_stack_adjust ();
2618 last_expr_type = 0;
2620 /* PCC-struct return always uses an epilogue. */
2621 if (current_function_returns_pcc_struct || use_goto)
2623 if (end_label == 0)
2624 end_label = return_label = gen_label_rtx ();
2625 expand_goto_internal (NULL_TREE, end_label, last_insn);
2626 return;
2629 /* Otherwise output a simple return-insn if one is available,
2630 unless it won't do the job. */
2631 #ifdef HAVE_return
2632 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2634 emit_jump_insn (gen_return ());
2635 emit_barrier ();
2636 return;
2638 #endif
2640 /* Otherwise jump to the epilogue. */
2641 expand_goto_internal (NULL_TREE, end_label, last_insn);
2644 /* Generate RTL to evaluate the expression RETVAL and return it
2645 from the current function. */
2647 void
2648 expand_return (retval)
2649 tree retval;
2651 /* If there are any cleanups to be performed, then they will
2652 be inserted following LAST_INSN. It is desirable
2653 that the last_insn, for such purposes, should be the
2654 last insn before computing the return value. Otherwise, cleanups
2655 which call functions can clobber the return value. */
2656 /* ??? rms: I think that is erroneous, because in C++ it would
2657 run destructors on variables that might be used in the subsequent
2658 computation of the return value. */
2659 rtx last_insn = 0;
2660 register rtx val = 0;
2661 register rtx op0;
2662 tree retval_rhs;
2663 int cleanups;
2664 struct nesting *block;
2666 /* Bytecode returns are quite simple: just leave the result on the
2667 arithmetic stack. */
2668 if (output_bytecode)
2670 bc_expand_expr (retval);
2671 bc_emit_instruction (ret);
2672 return;
2675 /* If function wants no value, give it none. */
2676 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2678 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2679 emit_queue ();
2680 expand_null_return ();
2681 return;
2684 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2685 /* This is not sufficient. We also need to watch for cleanups of the
2686 expression we are about to expand. Unfortunately, we cannot know
2687 if it has cleanups until we expand it, and we want to change how we
2688 expand it depending upon if we need cleanups. We can't win. */
2689 #if 0
2690 cleanups = any_pending_cleanups (1);
2691 #else
2692 cleanups = 1;
2693 #endif
2695 if (TREE_CODE (retval) == RESULT_DECL)
2696 retval_rhs = retval;
2697 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2698 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2699 retval_rhs = TREE_OPERAND (retval, 1);
2700 else if (TREE_TYPE (retval) == void_type_node)
2701 /* Recognize tail-recursive call to void function. */
2702 retval_rhs = retval;
2703 else
2704 retval_rhs = NULL_TREE;
2706 /* Only use `last_insn' if there are cleanups which must be run. */
2707 if (cleanups || cleanup_label != 0)
2708 last_insn = get_last_insn ();
2710 /* Distribute return down conditional expr if either of the sides
2711 may involve tail recursion (see test below). This enhances the number
2712 of tail recursions we see. Don't do this always since it can produce
2713 sub-optimal code in some cases and we distribute assignments into
2714 conditional expressions when it would help. */
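/* That is, `return COND ? f (a) : g (a);' is expanded as if written

       if (COND) return f (a); else return g (a);

   so each arm gets its own chance at the tail-recursion test below.  */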
2716 if (optimize && retval_rhs != 0
2717 && frame_offset == 0
2718 && TREE_CODE (retval_rhs) == COND_EXPR
2719 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2720 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2722 rtx label = gen_label_rtx ();
2723 tree expr;
2725 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2726 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2727 DECL_RESULT (current_function_decl),
2728 TREE_OPERAND (retval_rhs, 1));
2729 TREE_SIDE_EFFECTS (expr) = 1;
2730 expand_return (expr);
2731 emit_label (label);
2733 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2734 DECL_RESULT (current_function_decl),
2735 TREE_OPERAND (retval_rhs, 2));
2736 TREE_SIDE_EFFECTS (expr) = 1;
2737 expand_return (expr);
2738 return;
2741 /* For tail-recursive call to current function,
2742 just jump back to the beginning.
2743 It's unsafe if any auto variable in this function
2744 has its address taken; for simplicity,
2745 require stack frame to be empty. */
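/* For example, in a function such as

       int countdown (n) int n; { return n == 0 ? 0 : countdown (n - 1); }

   the recursive arm becomes a jump back to tail_recursion_label, after
   tail_recursion_args has stored the new argument values into the
   formals, and no new stack frame is pushed.  */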
2746 if (optimize && retval_rhs != 0
2747 && frame_offset == 0
2748 && TREE_CODE (retval_rhs) == CALL_EXPR
2749 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2750 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2751 /* Finish checking validity, and if valid emit code
2752 to set the argument variables for the new call. */
2753 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2754 DECL_ARGUMENTS (current_function_decl)))
2756 if (tail_recursion_label == 0)
2758 tail_recursion_label = gen_label_rtx ();
2759 emit_label_after (tail_recursion_label,
2760 tail_recursion_reentry);
2762 emit_queue ();
2763 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2764 emit_barrier ();
2765 return;
2767 #ifdef HAVE_return
2768 /* This optimization is safe if there are local cleanups
2769 because expand_null_return takes care of them.
2770 ??? I think it should also be safe when there is a cleanup label,
2771 because expand_null_return takes care of them, too.
2772 Any reason why not? */
2773 if (HAVE_return && cleanup_label == 0
2774 && ! current_function_returns_pcc_struct
2775 && BRANCH_COST <= 1)
2777 /* If this is return x == y; then generate
2778 if (x == y) return 1; else return 0;
2779 if we can do it with explicit return insns and branches are cheap,
2780 but not if we have the corresponding scc insn. */
2781 int has_scc = 0;
2782 if (retval_rhs)
2783 switch (TREE_CODE (retval_rhs))
2785 case EQ_EXPR:
2786 #ifdef HAVE_seq
2787 has_scc = HAVE_seq;
2788 #endif
2789 case NE_EXPR:
2790 #ifdef HAVE_sne
2791 has_scc = HAVE_sne;
2792 #endif
2793 case GT_EXPR:
2794 #ifdef HAVE_sgt
2795 has_scc = HAVE_sgt;
2796 #endif
2797 case GE_EXPR:
2798 #ifdef HAVE_sge
2799 has_scc = HAVE_sge;
2800 #endif
2801 case LT_EXPR:
2802 #ifdef HAVE_slt
2803 has_scc = HAVE_slt;
2804 #endif
2805 case LE_EXPR:
2806 #ifdef HAVE_sle
2807 has_scc = HAVE_sle;
2808 #endif
2809 case TRUTH_ANDIF_EXPR:
2810 case TRUTH_ORIF_EXPR:
2811 case TRUTH_AND_EXPR:
2812 case TRUTH_OR_EXPR:
2813 case TRUTH_NOT_EXPR:
2814 case TRUTH_XOR_EXPR:
2815 if (! has_scc)
2817 op0 = gen_label_rtx ();
2818 jumpifnot (retval_rhs, op0);
2819 expand_value_return (const1_rtx);
2820 emit_label (op0);
2821 expand_value_return (const0_rtx);
2822 return;
2826 #endif /* HAVE_return */
2828 /* If the result is an aggregate that is being returned in one (or more)
2829 registers, load the registers here. The compiler currently can't handle
2830 copying a BLKmode value into registers. We could put this code in a
2831 more general area (for use by everyone instead of just function
2832 call/return), but until this feature is generally usable it is kept here
2833 (and in expand_call). The value must go into a pseudo in case there
2834 are cleanups that will clobber the real return register. */
2836 if (retval_rhs != 0
2837 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2838 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2840 int i, bitpos, xbitpos;
2841 int big_endian_correction = 0;
2842 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2843 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2844 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),BITS_PER_WORD);
2845 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2846 rtx result_reg, src, dst;
2847 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2848 enum machine_mode tmpmode, result_reg_mode;
2850 /* Structures whose size is not a multiple of a word are aligned
2851 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2852 machine, this means we must skip the empty high order bytes when
2853 calculating the bit offset. */
2854 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2855 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2856 * BITS_PER_UNIT));
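/* Worked example (assuming a 32-bit big-endian target): for a 6-byte
   structure, bytes % UNITS_PER_WORD == 2, so big_endian_correction
   = 32 - 2 * 8 = 16; the copy below then starts 16 bits into the
   destination so the data ends up aligned to the least significant
   byte.  */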
2858 /* Copy the structure BITSIZE bits at a time. */
2859 for (bitpos = 0, xbitpos = big_endian_correction;
2860 bitpos < bytes * BITS_PER_UNIT;
2861 bitpos += bitsize, xbitpos += bitsize)
2863 /* We need a new destination pseudo each time xbitpos is
2864 on a word boundary and when xbitpos == big_endian_correction
2865 (the first time through). */
2866 if (xbitpos % BITS_PER_WORD == 0
2867 || xbitpos == big_endian_correction)
2869 /* Generate an appropriate register. */
2870 dst = gen_reg_rtx (word_mode);
2871 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2873 /* Clobber the destination before we move anything into it. */
2874 emit_insn (gen_rtx (CLOBBER, VOIDmode, dst));
2877 /* We need a new source operand each time bitpos is on a word
2878 boundary. */
2879 if (bitpos % BITS_PER_WORD == 0)
2880 src = operand_subword_force (result_val,
2881 bitpos / BITS_PER_WORD,
2882 BLKmode);
2884 /* Use bitpos for the source extraction (left justified) and
2885 xbitpos for the destination store (right justified). */
2886 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2887 extract_bit_field (src, bitsize,
2888 bitpos % BITS_PER_WORD, 1,
2889 NULL_RTX, word_mode,
2890 word_mode,
2891 bitsize / BITS_PER_UNIT,
2892 BITS_PER_WORD),
2893 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2896 /* Find the smallest integer mode large enough to hold the
2897 entire structure and use that mode instead of BLKmode
2898 on the USE insn for the return register. */
2899 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2900 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2901 tmpmode != MAX_MACHINE_MODE;
2902 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2904 /* Have we found a large enough mode? */
2905 if (GET_MODE_SIZE (tmpmode) >= bytes)
2906 break;
2909 /* No suitable mode found. */
2910 if (tmpmode == MAX_MACHINE_MODE)
2911 abort ();
2913 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2915 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2916 result_reg_mode = word_mode;
2917 else
2918 result_reg_mode = tmpmode;
2919 result_reg = gen_reg_rtx (result_reg_mode);
2921 /* Now that the value is in pseudos, copy it to the result reg(s). */
2922 expand_cleanups_to (NULL_TREE);
2923 emit_queue ();
2924 free_temp_slots ();
2925 for (i = 0; i < n_regs; i++)
2926 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2927 result_pseudos[i]);
2929 if (tmpmode != result_reg_mode)
2930 result_reg = gen_lowpart (tmpmode, result_reg);
2932 expand_value_return (result_reg);
2934 else if (cleanups
2935 && retval_rhs != 0
2936 && TREE_TYPE (retval_rhs) != void_type_node
2937 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2939 /* Calculate the return value into a pseudo reg. */
2940 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
2941 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2942 val = force_not_mem (val);
2943 expand_cleanups_to (NULL_TREE);
2944 emit_queue ();
2945 /* All temporaries have now been used. */
2946 free_temp_slots ();
2947 /* Return the calculated value, doing cleanups first. */
2948 expand_value_return (val);
2950 else
2952 /* No cleanups or no hard reg used;
2953 calculate value into hard return reg. */
2954 expand_expr (retval, const0_rtx, VOIDmode, 0);
2955 expand_cleanups_to (NULL_TREE);
2956 emit_queue ();
2957 free_temp_slots ();
2958 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2962 /* Return 1 if the end of the generated RTX is not a barrier.
2963 This means code already compiled can drop through. */
2965 int
2966 drop_through_at_end_p ()
2968 rtx insn = get_last_insn ();
2969 while (insn && GET_CODE (insn) == NOTE)
2970 insn = PREV_INSN (insn);
2971 return insn && GET_CODE (insn) != BARRIER;
2974 /* Emit code to alter this function's formal parms for a tail-recursive call.
2975 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2976 FORMALS is the chain of decls of formals.
2977 Return 1 if this can be done;
2978 otherwise return 0 and do not emit any code. */
2980 static int
2981 tail_recursion_args (actuals, formals)
2982 tree actuals, formals;
2984 register tree a = actuals, f = formals;
2985 register int i;
2986 register rtx *argvec;
2988 /* Check that number and types of actuals are compatible
2989 with the formals. This is not always true in valid C code.
2990 Also check that no formal needs to be addressable
2991 and that all formals are scalars. */
2993 /* Also count the args. */
2995 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2997 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2998 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
2999 return 0;
3000 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3001 return 0;
3003 if (a != 0 || f != 0)
3004 return 0;
3006 /* Compute all the actuals. */
3008 argvec = (rtx *) alloca (i * sizeof (rtx));
3010 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3011 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3013 /* Find which actual values refer to current values of previous formals.
3014 Copy each of them now, before any formal is changed. */
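/* E.g. for the swap-style tail call `return f (y, x);' within
   f (x, y), the actual `x' refers to a formal that is about to be
   overwritten with `y', so its current value is copied to a fresh
   register first.  */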
3016 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3018 int copy = 0;
3019 register int j;
3020 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3021 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3022 { copy = 1; break; }
3023 if (copy)
3024 argvec[i] = copy_to_reg (argvec[i]);
3027 /* Store the values of the actuals into the formals. */
3029 for (f = formals, a = actuals, i = 0; f;
3030 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3032 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3033 emit_move_insn (DECL_RTL (f), argvec[i]);
3034 else
3035 convert_move (DECL_RTL (f), argvec[i],
3036 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3039 free_temp_slots ();
3040 return 1;
3043 /* Generate the RTL code for entering a binding contour.
3044 The variables are declared one by one, by calls to `expand_decl'.
3046 EXIT_FLAG is nonzero if this construct should be visible to
3047 `exit_something'. */
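/* Sketch of the protocol for a brace block `{ int v = 0; ... }':

       expand_start_bindings (0);
       expand_decl (v_decl);
       expand_decl_init (v_decl);
       ... expand the enclosed statements ...
       expand_end_bindings (decls, 1, 0);  */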
3049 void
3050 expand_start_bindings (exit_flag)
3051 int exit_flag;
3053 struct nesting *thisblock = ALLOC_NESTING ();
3054 rtx note = output_bytecode ? 0 : emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3056 /* Make an entry on block_stack for the block we are entering. */
3058 thisblock->next = block_stack;
3059 thisblock->all = nesting_stack;
3060 thisblock->depth = ++nesting_depth;
3061 thisblock->data.block.stack_level = 0;
3062 thisblock->data.block.cleanups = 0;
3063 thisblock->data.block.function_call_count = 0;
3064 #if 0
3065 if (block_stack)
3067 if (block_stack->data.block.cleanups == NULL_TREE
3068 && (block_stack->data.block.outer_cleanups == NULL_TREE
3069 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
3070 thisblock->data.block.outer_cleanups = empty_cleanup_list;
3071 else
3072 thisblock->data.block.outer_cleanups
3073 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3074 block_stack->data.block.outer_cleanups);
3076 else
3077 thisblock->data.block.outer_cleanups = 0;
3078 #endif
3079 #if 1
3080 if (block_stack
3081 && !(block_stack->data.block.cleanups == NULL_TREE
3082 && block_stack->data.block.outer_cleanups == NULL_TREE))
3083 thisblock->data.block.outer_cleanups
3084 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3085 block_stack->data.block.outer_cleanups);
3086 else
3087 thisblock->data.block.outer_cleanups = 0;
3088 #endif
3089 thisblock->data.block.label_chain = 0;
3090 thisblock->data.block.innermost_stack_block = stack_block_stack;
3091 thisblock->data.block.first_insn = note;
3092 thisblock->data.block.block_start_count = ++block_start_count;
3093 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3094 block_stack = thisblock;
3095 nesting_stack = thisblock;
3097 if (!output_bytecode)
3099 /* Make a new level for allocating stack slots. */
3100 push_temp_slots ();
3104 /* Given a pointer to a BLOCK node, save a pointer to the most recently
3105 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3106 BLOCK node. */
3108 void
3109 remember_end_note (block)
3110 register tree block;
3112 BLOCK_END_NOTE (block) = last_block_end_note;
3113 last_block_end_note = NULL_RTX;
3116 /* Generate RTL code to terminate a binding contour.
3117 VARS is the chain of VAR_DECL nodes
3118 for the variables bound in this contour.
3119 MARK_ENDS is nonzero if we should put a note at the beginning
3120 and end of this binding contour.
3122 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3123 (That is true automatically if the contour has a saved stack level.) */
3125 void
3126 expand_end_bindings (vars, mark_ends, dont_jump_in)
3127 tree vars;
3128 int mark_ends;
3129 int dont_jump_in;
3131 register struct nesting *thisblock = block_stack;
3132 register tree decl;
3134 if (output_bytecode)
3136 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
3137 return;
3140 if (warn_unused)
3141 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3142 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3143 && ! DECL_IN_SYSTEM_HEADER (decl))
3144 warning_with_decl (decl, "unused variable `%s'");
3146 if (thisblock->exit_label)
3148 do_pending_stack_adjust ();
3149 emit_label (thisblock->exit_label);
3152 /* If necessary, make a handler for nonlocal gotos taking
3153 place in the function calls in this block. */
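/* Such a handler is needed when, for example, a nested function called
   from this block does `goto lab;' to a label in this outer function:
   control reenters here through the handler rather than by a normal
   edge, so the frame and argument pointers must be reestablished by
   hand, as done below.  */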
3154 if (function_call_count != thisblock->data.block.function_call_count
3155 && nonlocal_labels
3156 /* Make handler for outermost block
3157 if there were any nonlocal gotos to this function. */
3158 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3159 /* Make handler for inner block if it has something
3160 special to do when you jump out of it. */
3161 : (thisblock->data.block.cleanups != 0
3162 || thisblock->data.block.stack_level != 0)))
3164 tree link;
3165 rtx afterward = gen_label_rtx ();
3166 rtx handler_label = gen_label_rtx ();
3167 rtx save_receiver = gen_reg_rtx (Pmode);
3168 rtx insns;
3170 /* Don't let jump_optimize delete the handler. */
3171 LABEL_PRESERVE_P (handler_label) = 1;
3173 /* Record the handler address in the stack slot for that purpose,
3174 during this block, saving and restoring the outer value. */
3175 if (thisblock->next != 0)
3177 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
3179 start_sequence ();
3180 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
3181 insns = get_insns ();
3182 end_sequence ();
3183 emit_insns_before (insns, thisblock->data.block.first_insn);
3186 start_sequence ();
3187 emit_move_insn (nonlocal_goto_handler_slot,
3188 gen_rtx (LABEL_REF, Pmode, handler_label));
3189 insns = get_insns ();
3190 end_sequence ();
3191 emit_insns_before (insns, thisblock->data.block.first_insn);
3193 /* Jump around the handler; it runs only when specially invoked. */
3194 emit_jump (afterward);
3195 emit_label (handler_label);
3197 #ifdef HAVE_nonlocal_goto
3198 if (! HAVE_nonlocal_goto)
3199 #endif
3200 /* First adjust our frame pointer to its actual value. It was
3201 previously set to the start of the virtual area corresponding to
3202 the stacked variables when we branched here and now needs to be
3203 adjusted to the actual hardware fp value.
3205 Assignments to virtual registers are converted by
3206 instantiate_virtual_regs into the corresponding assignment
3207 to the underlying register (fp in this case) that makes
3208 the original assignment true.
3209 So the following insn will actually be
3210 decrementing fp by STARTING_FRAME_OFFSET. */
3211 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3213 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3214 if (fixed_regs[ARG_POINTER_REGNUM])
3216 #ifdef ELIMINABLE_REGS
3217 /* If the argument pointer can be eliminated in favor of the
3218 frame pointer, we don't need to restore it. We assume here
3219 that if such an elimination is present, it can always be used.
3220 This is the case on all known machines; if we don't make this
3221 assumption, we do unnecessary saving on many machines. */
3222 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3223 int i;
3225 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3226 if (elim_regs[i].from == ARG_POINTER_REGNUM
3227 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3228 break;
3230 if (i == sizeof elim_regs / sizeof elim_regs [0])
3231 #endif
3233 /* Now restore our arg pointer from the address at which it
3234 was saved in our stack frame.
3235 If space hasn't been allocated for it yet, make
3236 some now. */
3237 if (arg_pointer_save_area == 0)
3238 arg_pointer_save_area
3239 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3240 emit_move_insn (virtual_incoming_args_rtx,
3241 /* We need a pseudo here, or else
3242 instantiate_virtual_regs_1 complains. */
3243 copy_to_reg (arg_pointer_save_area));
3246 #endif
3248 #ifdef HAVE_nonlocal_goto_receiver
3249 if (HAVE_nonlocal_goto_receiver)
3250 emit_insn (gen_nonlocal_goto_receiver ());
3251 #endif
3253 /* The handler expects the desired label address in the static chain
3254 register. It tests the address and does an appropriate jump
3255 to whatever label is desired. */
3256 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
3257 /* Skip any labels we shouldn't be able to jump to from here. */
3258 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3260 rtx not_this = gen_label_rtx ();
3261 rtx this = gen_label_rtx ();
3262 do_jump_if_equal (static_chain_rtx,
3263 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
3264 this, 0);
3265 emit_jump (not_this);
3266 emit_label (this);
3267 expand_goto (TREE_VALUE (link));
3268 emit_label (not_this);
3270 /* If label is not recognized, abort. */
3271 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
3272 VOIDmode, 0);
3273 emit_barrier ();
3274 emit_label (afterward);
3277 /* Don't allow jumping into a block that has cleanups or a stack level. */
3278 if (dont_jump_in
3279 || thisblock->data.block.stack_level != 0
3280 || thisblock->data.block.cleanups != 0)
3282 struct label_chain *chain;
3284 /* Any labels in this block are no longer valid to go to.
3285 Mark them to cause an error message. */
3286 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3288 DECL_TOO_LATE (chain->label) = 1;
3289 /* If any goto without a fixup came to this label,
3290 that must be an error, because gotos without fixups
3291 come from outside all saved stack-levels and all cleanups. */
3292 if (TREE_ADDRESSABLE (chain->label))
3293 error_with_decl (chain->label,
3294 "label `%s' used before containing binding contour");
3298 /* Restore stack level in effect before the block
3299 (only if variable-size objects allocated). */
3300 /* Perform any cleanups associated with the block. */
3302 if (thisblock->data.block.stack_level != 0
3303 || thisblock->data.block.cleanups != 0)
3305 /* Only clean up here if this point can actually be reached. */
3306 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3308 /* Don't let cleanups affect ({...}) constructs. */
3309 int old_expr_stmts_for_value = expr_stmts_for_value;
3310 rtx old_last_expr_value = last_expr_value;
3311 tree old_last_expr_type = last_expr_type;
3312 expr_stmts_for_value = 0;
3314 /* Do the cleanups. */
3315 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3316 if (reachable)
3317 do_pending_stack_adjust ();
3319 expr_stmts_for_value = old_expr_stmts_for_value;
3320 last_expr_value = old_last_expr_value;
3321 last_expr_type = old_last_expr_type;
3323 /* Restore the stack level. */
3325 if (reachable && thisblock->data.block.stack_level != 0)
3327 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3328 thisblock->data.block.stack_level, NULL_RTX);
3329 if (nonlocal_goto_handler_slot != 0)
3330 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3331 NULL_RTX);
3334 /* Any gotos out of this block must also do these things.
3335 Also report any gotos with fixups that came to labels in this
3336 level. */
3337 fixup_gotos (thisblock,
3338 thisblock->data.block.stack_level,
3339 thisblock->data.block.cleanups,
3340 thisblock->data.block.first_insn,
3341 dont_jump_in);
3344 /* Mark the beginning and end of the scope if requested.
3345 We do this now, after running cleanups on the variables
3346 just going out of scope, so they are in scope for their cleanups. */
3348 if (mark_ends)
3349 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3350 else
3351 /* Get rid of the beginning-mark if we don't make an end-mark. */
3352 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3354 /* If doing stupid register allocation, make sure lives of all
3355 register variables declared here extend thru end of scope. */
3357 if (obey_regdecls)
3358 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3360 rtx rtl = DECL_RTL (decl);
3361 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3362 use_variable (rtl);
3365 /* Restore block_stack level for containing block. */
3367 stack_block_stack = thisblock->data.block.innermost_stack_block;
3368 POPSTACK (block_stack);
3370 /* Pop the stack slot nesting and free any slots at this level. */
3371 pop_temp_slots ();
3375 /* End a binding contour.
3376 VARS is the chain of VAR_DECL nodes for the variables bound
3377 in this contour. MARK_ENDS is nonzero if we should put a note
3378 at the beginning and end of this binding contour.
3379 DONT_JUMP_IN is nonzero if it is not valid to jump into this
3380 contour. */
3382 static void
3383 bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3384 tree vars;
3385 int mark_ends;
3386 int dont_jump_in;
3388 struct nesting *thisbind = nesting_stack;
3389 tree decl;
3391 if (warn_unused)
3392 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3393 if (! TREE_USED (TREE_VALUE (decl)) && TREE_CODE (TREE_VALUE (decl)) == VAR_DECL)
3394 warning_with_decl (TREE_VALUE (decl), "unused variable `%s'");
3396 if (thisbind->exit_label)
3397 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));
3399 /* Pop block/bindings off stack */
3400 POPSTACK (block_stack);
3403 /* Generate RTL for the automatic variable declaration DECL.
3404 (Other kinds of declarations are simply ignored if seen here.) */
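/* Depending on the declaration, DECL_RTL (DECL) becomes, for example:

       int i;           a pseudo register, when it need not live in memory;
       char buf[10];    a MEM at a fixed offset in the stack frame;
       char vla[n];     a MEM whose address is computed at run time.

   These cases correspond to the branches below.  */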
3406 void
3407 expand_decl (decl)
3408 register tree decl;
3410 struct nesting *thisblock = block_stack;
3411 tree type;
3413 if (output_bytecode)
3415 bc_expand_decl (decl, 0);
3416 return;
3419 type = TREE_TYPE (decl);
3421 /* Only automatic variables need any expansion done.
3422 Static and external variables, and external functions,
3423 will be handled by `assemble_variable' (called from finish_decl).
3424 TYPE_DECL and CONST_DECL require nothing.
3425 PARM_DECLs are handled in `assign_parms'. */
3427 if (TREE_CODE (decl) != VAR_DECL)
3428 return;
3429 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3430 return;
3432 /* Create the RTL representation for the variable. */
3434 if (type == error_mark_node)
3435 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3436 else if (DECL_SIZE (decl) == 0)
3437 /* Variable with incomplete type. */
3439 if (DECL_INITIAL (decl) == 0)
3440 /* Error message was already done; now avoid a crash. */
3441 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3442 else
3443 /* An initializer is going to decide the size of this array.
3444 Until we know the size, represent its address with a reg. */
3445 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3446 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
3448 else if (DECL_MODE (decl) != BLKmode
3449 /* If -ffloat-store, don't put explicit float vars
3450 into regs. */
3451 && !(flag_float_store
3452 && TREE_CODE (type) == REAL_TYPE)
3453 && ! TREE_THIS_VOLATILE (decl)
3454 && ! TREE_ADDRESSABLE (decl)
3455 && (DECL_REGISTER (decl) || ! obey_regdecls))
3457 /* Automatic variable that can go in a register. */
3458 int unsignedp = TREE_UNSIGNED (type);
3459 enum machine_mode reg_mode
3460 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3462 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3463 mark_user_reg (DECL_RTL (decl));
3465 if (TREE_CODE (type) == POINTER_TYPE)
3466 mark_reg_pointer (DECL_RTL (decl),
3467 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3468 / BITS_PER_UNIT));
3471 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
3473 /* Variable of fixed size that goes on the stack. */
3474 rtx oldaddr = 0;
3475 rtx addr;
3477 /* If we previously made RTL for this decl, it must be an array
3478 whose size was determined by the initializer.
3479 The old address was a register; set that register now
3480 to the proper address. */
3481 if (DECL_RTL (decl) != 0)
3483 if (GET_CODE (DECL_RTL (decl)) != MEM
3484 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3485 abort ();
3486 oldaddr = XEXP (DECL_RTL (decl), 0);
3489 DECL_RTL (decl)
3490 = assign_stack_temp (DECL_MODE (decl),
3491 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3492 + BITS_PER_UNIT - 1)
3493 / BITS_PER_UNIT),
3495 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3497 /* Set alignment we actually gave this decl. */
3498 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3499 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3501 if (oldaddr)
3503 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3504 if (addr != oldaddr)
3505 emit_move_insn (oldaddr, addr);
3508 /* If this is a memory ref that contains aggregate components,
3509 mark it as such for cse and loop optimize. */
3510 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3511 #if 0
3512 /* If this is in memory because of -ffloat-store,
3513 set the volatile bit, to prevent optimizations from
3514 undoing the effects. */
3515 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3516 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3517 #endif
3519 else
3520 /* Dynamic-size object: must push space on the stack. */
3522 rtx address, size;
3524 /* Record the stack pointer on entry to block, if have
3525 not already done so. */
3526 if (thisblock->data.block.stack_level == 0)
3528 do_pending_stack_adjust ();
3529 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3530 &thisblock->data.block.stack_level,
3531 thisblock->data.block.first_insn);
3532 stack_block_stack = thisblock;
3535 /* Compute the variable's size, in bytes. */
3536 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3537 DECL_SIZE (decl),
3538 size_int (BITS_PER_UNIT)),
3539 NULL_RTX, VOIDmode, 0);
3540 free_temp_slots ();
3542 /* Allocate space on the stack for the variable. Note that
3543 DECL_ALIGN says how the variable is to be aligned and we
3544 cannot use it to conclude anything about the alignment of
3545 the size. */
3546 address = allocate_dynamic_stack_space (size, NULL_RTX,
3547 TYPE_ALIGN (TREE_TYPE (decl)));
3550 /* Reference the variable indirectly through that rtx. */
3550 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
3552 /* If this is a memory ref that contains aggregate components,
3553 mark it as such for cse and loop optimize. */
3554 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3556 /* Indicate the alignment we actually gave this variable. */
3557 #ifdef STACK_BOUNDARY
3558 DECL_ALIGN (decl) = STACK_BOUNDARY;
3559 #else
3560 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3561 #endif
3564 if (TREE_THIS_VOLATILE (decl))
3565 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3566 #if 0 /* A variable is not necessarily unchanging
3567 just because it is const. RTX_UNCHANGING_P
3568 means no change in the function,
3569 not merely no change in the variable's scope.
3570 It is correct to set RTX_UNCHANGING_P if the variable's scope
3571 is the whole function. There's no convenient way to test that. */
3572 if (TREE_READONLY (decl))
3573 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3574 #endif
3576 /* If doing stupid register allocation, make sure life of any
3577 register variable starts here, at the start of its scope. */
3579 if (obey_regdecls)
3580 use_variable (DECL_RTL (decl));
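/* An illustrative sketch (editor's example, not part of the original
   source): the three paths above roughly correspond to

	void f (int n)
	{
	  int i;	  fits a register: the gen_reg_rtx path
	  double d[4];	  fixed size: the assign_stack_temp path
	  char buf[n];	  variable size: allocate_dynamic_stack_space
	}

   assuming `i' is neither volatile nor addressable and obey_regdecls
   is clear.  For `buf' the stack pointer is first saved with
   emit_stack_save so the space can be reclaimed on block exit.  */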
3584 /* Generate code for the automatic variable declaration DECL. For
3585 most variables this just means we give it a stack offset. The
3586 compiler sometimes emits cleanups without variables and we will
3587 have to deal with those too. */
3589 static void
3590 bc_expand_decl (decl, cleanup)
3591 tree decl;
3592 tree cleanup;
3594 tree type;
3596 if (!decl)
3598 /* A cleanup with no variable. */
3599 if (!cleanup)
3600 abort ();
3602 return;
3605 /* Only auto variables need any work. */
3606 if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3607 return;
3609 type = TREE_TYPE (decl);
3611 if (type == error_mark_node)
3612 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3614 else if (DECL_SIZE (decl) == 0)
3616 /* Variable with incomplete type. The stack offset herein will be
3617 fixed later in expand_decl_init (). */
3618 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3620 else if (TREE_CONSTANT (DECL_SIZE (decl)))
3622 DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
3623 DECL_ALIGN (decl));
3625 else
3626 DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
3629 /* Emit code to perform the initialization of a declaration DECL. */
3631 void
3632 expand_decl_init (decl)
3633 tree decl;
3635 int was_used = TREE_USED (decl);
3637 if (output_bytecode)
3639 bc_expand_decl_init (decl);
3640 return;
3643 /* If this is a CONST_DECL, we don't have to generate any code, but
3644 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3645 to be set while in the obstack containing the constant. If we don't
3646 do this, we can lose if we have functions nested three deep and the middle
3647 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3648 the innermost function is the first to expand that STRING_CST. */
3649 if (TREE_CODE (decl) == CONST_DECL)
3651 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3652 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3653 EXPAND_INITIALIZER);
3654 return;
3657 if (TREE_STATIC (decl))
3658 return;
3660 /* Compute and store the initial value now. */
3662 if (DECL_INITIAL (decl) == error_mark_node)
3664 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3665 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3666 || code == POINTER_TYPE)
3667 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3668 0, 0);
3669 emit_queue ();
3671 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3673 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3674 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3675 emit_queue ();
3678 /* Don't let the initialization count as "using" the variable. */
3679 TREE_USED (decl) = was_used;
3681 /* Free any temporaries we made while initializing the decl. */
3682 preserve_temp_slots (NULL_RTX);
3683 free_temp_slots ();
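/* To illustrate (an editor's sketch): for

	int x = n + 1;

   the code above emits a line note and then expands the assignment
   `x = n + 1' like an ordinary statement, whereas

	static int y = 5;

   returns early, since static initializers are handled elsewhere;
   and an error_mark_node initializer is replaced by a zero of the
   declared type when that type is arithmetic or pointer.  */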
3686 /* Expand initialization for variable-sized types. Allocate array
3687 using newlocalSI and set local variable, which is a pointer to the
3688 storage. */
3690 static void
3691 bc_expand_variable_local_init (decl)
3692 tree decl;
3694 /* Evaluate size expression and coerce to SI */
3695 bc_expand_expr (DECL_SIZE (decl));
3697 /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
3698 no coercion is necessary (?) */
3700 /* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
3701 TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */
3703 /* Emit code to allocate array */
3704 bc_emit_instruction (newlocalSI);
3706 /* Store array pointer in local variable. This is the only instance
3707 where we actually want the address of the pointer to the
3708 variable-size block, rather than the pointer itself. We avoid
3709 using expand_address() since that would cause the pointer to be
3710 pushed rather than its address. Hence the hard-coded reference;
3711 notice also that the variable is always local (no global
3712 variable-size type variables). */
3714 bc_load_localaddr (DECL_RTL (decl));
3715 bc_emit_instruction (storeP);
3719 /* Emit code to initialize a declaration. */
3721 static void
3722 bc_expand_decl_init (decl)
3723 tree decl;
3725 int org_stack_depth;
3727 /* Static initializers are handled elsewhere. */
3729 if (TREE_STATIC (decl))
3730 return;
3732 /* Remember the original stack depth. */
3733 org_stack_depth = stack_depth;
3735 /* If the type is variable-size, we first create its space (we ASSUME
3736 it CAN'T be static). We do this regardless of whether there's an
3737 initializer assignment or not. */
3739 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
3740 bc_expand_variable_local_init (decl);
3742 /* Expand initializer assignment */
3743 if (DECL_INITIAL (decl) == error_mark_node)
3745 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3747 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3748 || code == POINTER_TYPE)
3750 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3752 else if (DECL_INITIAL (decl))
3753 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3755 /* Restore stack depth */
3756 if (org_stack_depth > stack_depth)
3757 abort ();
3759 bc_adjust_stack (stack_depth - org_stack_depth);
3763 /* CLEANUP is an expression to be executed at exit from this binding contour;
3764 for example, in C++, it might call the destructor for this variable.
3766 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3767 CLEANUP multiple times, and have the correct semantics. This
3768 happens in exception handling, and for non-local gotos.
3770 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3771 that is not associated with any particular variable. */
3773 int
3774 expand_decl_cleanup (decl, cleanup)
3775 tree decl, cleanup;
3777 struct nesting *thisblock = block_stack;
3779 /* Error if we are not in any block. */
3780 if (thisblock == 0)
3781 return 0;
3783 /* Record the cleanup if there is one. */
3785 if (cleanup != 0)
3787 cleanup = unsave_expr (cleanup);
3789 thisblock->data.block.cleanups
3790 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3791 /* If this block has a cleanup, it belongs in stack_block_stack. */
3792 stack_block_stack = thisblock;
3793 expand_eh_region_start ();
3795 return 1;
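/* For example (an editor's sketch): for a C++ block

	{ String s; ... }

   the front end calls expand_decl_cleanup with DECL = `s' and
   CLEANUP the destructor call `s.~String ()'.  Because CLEANUP is
   wrapped in an UNSAVE_EXPR, it can be expanded once for the normal
   exit from the block and again for the exception path or for a
   goto that leaves the block.  */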
3798 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3799 DECL_ELTS is the list of elements that belong to DECL's type.
3800 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3802 void
3803 expand_anon_union_decl (decl, cleanup, decl_elts)
3804 tree decl, cleanup, decl_elts;
3806 struct nesting *thisblock = block_stack;
3807 rtx x;
3809 expand_decl (decl);
3810 expand_decl_cleanup (decl, cleanup);
3811 x = DECL_RTL (decl);
3813 while (decl_elts)
3815 tree decl_elt = TREE_VALUE (decl_elts);
3816 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3817 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3819 /* Propagate the union's alignment to the elements. */
3820 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3822 /* If the element has BLKmode and the union doesn't, the union is
3823 aligned such that the element doesn't need to have BLKmode, so
3824 change the element's mode to the appropriate one for its size. */
3825 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3826 DECL_MODE (decl_elt) = mode
3827 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3828 MODE_INT, 1);
3830 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3831 instead create a new MEM rtx with the proper mode. */
3832 if (GET_CODE (x) == MEM)
3834 if (mode == GET_MODE (x))
3835 DECL_RTL (decl_elt) = x;
3836 else
3838 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
3839 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3840 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3843 else if (GET_CODE (x) == REG)
3845 if (mode == GET_MODE (x))
3846 DECL_RTL (decl_elt) = x;
3847 else
3848 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
3850 else
3851 abort ();
3853 /* Record the cleanup if there is one. */
3855 if (cleanup != 0)
3856 thisblock->data.block.cleanups
3857 = temp_tree_cons (decl_elt, cleanup_elt,
3858 thisblock->data.block.cleanups);
3860 decl_elts = TREE_CHAIN (decl_elts);
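/* For instance (an editor's sketch): for the anonymous union

	union { int i; float f; };

   DECL is the union variable and DECL_ELTS lists `i' and `f'.  If
   the union sits in memory in SImode, `i' typically shares DECL's
   rtx directly while `f' gets a fresh SFmode MEM at the same
   address; if the union sits in a register, `f' becomes a SUBREG
   of that register instead.  */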
3864 /* Expand a list of cleanups LIST.
3865 Elements may be expressions or may be nested lists.
3867 If DONT_DO is nonnull, then any list-element
3868 whose TREE_PURPOSE matches DONT_DO is omitted.
3869 This is sometimes used to avoid a cleanup associated with
3870 a value that is being returned out of the scope.
3872 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
3873 goto and handle protection regions specially in that case.
3875 If REACHABLE, we emit code, otherwise just inform the exception handling
3876 code about this finalization. */
3878 static void
3879 expand_cleanups (list, dont_do, in_fixup, reachable)
3880 tree list;
3881 tree dont_do;
3882 int in_fixup;
3883 int reachable;
3885 tree tail;
3886 for (tail = list; tail; tail = TREE_CHAIN (tail))
3887 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3889 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3890 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
3891 else
3893 if (! in_fixup)
3894 expand_eh_region_end (TREE_VALUE (tail));
3896 if (reachable)
3898 /* Cleanups may be run multiple times. For example,
3899 when exiting a binding contour, we expand the
3900 cleanups associated with that contour. When a goto
3901 within that binding contour has a target outside that
3902 contour, it will expand all cleanups from its scope to
3903 the target. Though the cleanups are expanded multiple
3904 times, the control paths are non-overlapping so the
3905 cleanups will not be executed twice. */
3906 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3907 free_temp_slots ();
3913 /* Move all cleanups from the current block_stack
3914 to the containing block_stack, where they are assumed to
3915 have been created. If anything can cause a temporary to
3916 be created, but not expanded for more than one level of
3917 block_stacks, then this code will have to change. */
3919 void
3920 move_cleanups_up ()
3922 struct nesting *block = block_stack;
3923 struct nesting *outer = block->next;
3925 outer->data.block.cleanups
3926 = chainon (block->data.block.cleanups,
3927 outer->data.block.cleanups);
3928 block->data.block.cleanups = 0;
3931 tree
3932 last_cleanup_this_contour ()
3934 if (block_stack == 0)
3935 return 0;
3937 return block_stack->data.block.cleanups;
3940 /* Return 1 if there are any pending cleanups at this point.
3941 If THIS_CONTOUR is nonzero, check the current contour as well.
3942 Otherwise, look only at the contours that enclose this one. */
3944 int
3945 any_pending_cleanups (this_contour)
3946 int this_contour;
3948 struct nesting *block;
3950 if (block_stack == 0)
3951 return 0;
3953 if (this_contour && block_stack->data.block.cleanups != NULL)
3954 return 1;
3955 if (block_stack->data.block.cleanups == 0
3956 && (block_stack->data.block.outer_cleanups == 0
3957 #if 0
3958 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3959 #endif
3960 ))
3961 return 0;
3963 for (block = block_stack->next; block; block = block->next)
3964 if (block->data.block.cleanups != 0)
3965 return 1;
3967 return 0;
3970 /* Enter a case (Pascal) or switch (C) statement.
3971 Push a block onto case_stack and nesting_stack
3972 to accumulate the case-labels that are seen
3973 and to record the labels generated for the statement.
3975 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3976 Otherwise, this construct is transparent for `exit_something'.
3978 EXPR is the index-expression to be dispatched on.
3979 TYPE is its nominal type. We could simply convert EXPR to this type,
3980 but instead we take short cuts. */
3982 void
3983 expand_start_case (exit_flag, expr, type, printname)
3984 int exit_flag;
3985 tree expr;
3986 tree type;
3987 char *printname;
3989 register struct nesting *thiscase = ALLOC_NESTING ();
3991 /* Make an entry on case_stack for the case we are entering. */
3993 thiscase->next = case_stack;
3994 thiscase->all = nesting_stack;
3995 thiscase->depth = ++nesting_depth;
3996 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3997 thiscase->data.case_stmt.case_list = 0;
3998 thiscase->data.case_stmt.index_expr = expr;
3999 thiscase->data.case_stmt.nominal_type = type;
4000 thiscase->data.case_stmt.default_label = 0;
4001 thiscase->data.case_stmt.num_ranges = 0;
4002 thiscase->data.case_stmt.printname = printname;
4003 thiscase->data.case_stmt.seenlabel = 0;
4004 case_stack = thiscase;
4005 nesting_stack = thiscase;
4007 if (output_bytecode)
4009 bc_expand_start_case (thiscase, expr, type, printname);
4010 return;
4013 do_pending_stack_adjust ();
4015 /* Make sure case_stmt.start points to something that won't
4016 need any transformation before expand_end_case. */
4017 if (GET_CODE (get_last_insn ()) != NOTE)
4018 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4020 thiscase->data.case_stmt.start = get_last_insn ();
4024 /* Enter a case statement. It is assumed that the caller has pushed
4025 the current context onto the case stack. */
4027 static void
4028 bc_expand_start_case (thiscase, expr, type, printname)
4029 struct nesting *thiscase;
4030 tree expr;
4031 tree type;
4032 char *printname;
4034 bc_expand_expr (expr);
4035 bc_expand_conversion (TREE_TYPE (expr), type);
4037 /* For cases, the skip is a place we jump to that's emitted after
4038 the size of the jump table is known. */
4040 thiscase->data.case_stmt.skip_label = gen_label_rtx ();
4041 bc_emit_bytecode (jump);
4042 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
4044 #ifdef DEBUG_PRINT_CODE
4045 fputc ('\n', stderr);
4046 #endif
4050 /* Start a "dummy case statement" within which case labels are invalid
4051 and are not connected to any larger real case statement.
4052 This can be used if you don't want to let a case statement jump
4053 into the middle of certain kinds of constructs. */
4055 void
4056 expand_start_case_dummy ()
4058 register struct nesting *thiscase = ALLOC_NESTING ();
4060 /* Make an entry on case_stack for the dummy. */
4062 thiscase->next = case_stack;
4063 thiscase->all = nesting_stack;
4064 thiscase->depth = ++nesting_depth;
4065 thiscase->exit_label = 0;
4066 thiscase->data.case_stmt.case_list = 0;
4067 thiscase->data.case_stmt.start = 0;
4068 thiscase->data.case_stmt.nominal_type = 0;
4069 thiscase->data.case_stmt.default_label = 0;
4070 thiscase->data.case_stmt.num_ranges = 0;
4071 case_stack = thiscase;
4072 nesting_stack = thiscase;
4075 /* End a dummy case statement. */
4077 void
4078 expand_end_case_dummy ()
4080 POPSTACK (case_stack);
4083 /* Return the data type of the index-expression
4084 of the innermost case statement, or null if none. */
4086 tree
4087 case_index_expr_type ()
4089 if (case_stack)
4090 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4091 return 0;
4094 /* Accumulate one case or default label inside a case or switch statement.
4095 VALUE is the value of the case (a null pointer, for a default label).
4096 The function CONVERTER, when applied to arguments T and V,
4097 converts the value V to the type T.
4099 If not currently inside a case or switch statement, return 1 and do
4100 nothing. The caller will print a language-specific error message.
4101 If VALUE is a duplicate or overlaps, return 2 and do nothing
4102 except store the (first) duplicate node in *DUPLICATE.
4103 If VALUE is out of range, return 3 and do nothing.
4104 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4105 Return 0 on success.
4107 Extended to handle range statements. */
4109 int
4110 pushcase (value, converter, label, duplicate)
4111 register tree value;
4112 tree (*converter) PROTO((tree, tree));
4113 register tree label;
4114 tree *duplicate;
4116 register struct case_node **l;
4117 register struct case_node *n;
4118 tree index_type;
4119 tree nominal_type;
4121 if (output_bytecode)
4122 return bc_pushcase (value, label);
4124 /* Fail if not inside a real case statement. */
4125 if (! (case_stack && case_stack->data.case_stmt.start))
4126 return 1;
4128 if (stack_block_stack
4129 && stack_block_stack->depth > case_stack->depth)
4130 return 5;
4132 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4133 nominal_type = case_stack->data.case_stmt.nominal_type;
4135 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4136 if (index_type == error_mark_node)
4137 return 0;
4139 /* Convert VALUE to the type in which the comparisons are nominally done. */
4140 if (value != 0)
4141 value = (*converter) (nominal_type, value);
4143 /* If this is the first label, warn if any insns have been emitted. */
4144 if (case_stack->data.case_stmt.seenlabel == 0)
4146 rtx insn;
4147 for (insn = case_stack->data.case_stmt.start;
4148 insn;
4149 insn = NEXT_INSN (insn))
4151 if (GET_CODE (insn) == CODE_LABEL)
4152 break;
4153 if (GET_CODE (insn) != NOTE
4154 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4156 warning ("unreachable code at beginning of %s",
4157 case_stack->data.case_stmt.printname);
4158 break;
4162 case_stack->data.case_stmt.seenlabel = 1;
4164 /* Fail if this value is out of range for the actual type of the index
4165 (which may be narrower than NOMINAL_TYPE). */
4166 if (value != 0 && ! int_fits_type_p (value, index_type))
4167 return 3;
4169 /* Fail if this is a duplicate or overlaps another entry. */
4170 if (value == 0)
4172 if (case_stack->data.case_stmt.default_label != 0)
4174 *duplicate = case_stack->data.case_stmt.default_label;
4175 return 2;
4177 case_stack->data.case_stmt.default_label = label;
4179 else
4180 return add_case_node (value, value, label, duplicate);
4182 expand_label (label);
4183 return 0;
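/* The return codes can be seen from (an editor's sketch)

	switch (c)
	  {
	  case 1: ...		first pushcase returns 0
	  case 1: ...		duplicate: returns 2, sets *duplicate
	  }

   a case value that does not fit the index type's range returns 3,
   and a case label that would jump past the declaration of a
   variable-sized object in the switch body returns 5.  */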
4186 /* Like pushcase but this case applies to all values
4187 between VALUE1 and VALUE2 (inclusive).
4188 The return value is the same as that of pushcase
4189 but there is one additional error code:
4190 4 means the specified range was empty. */
4192 int
4193 pushcase_range (value1, value2, converter, label, duplicate)
4194 register tree value1, value2;
4195 tree (*converter) PROTO((tree, tree));
4196 register tree label;
4197 tree *duplicate;
4199 register struct case_node **l;
4200 register struct case_node *n;
4201 tree index_type;
4202 tree nominal_type;
4204 /* Fail if not inside a real case statement. */
4205 if (! (case_stack && case_stack->data.case_stmt.start))
4206 return 1;
4208 if (stack_block_stack
4209 && stack_block_stack->depth > case_stack->depth)
4210 return 5;
4212 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4213 nominal_type = case_stack->data.case_stmt.nominal_type;
4215 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4216 if (index_type == error_mark_node)
4217 return 0;
4219 /* If this is the first label, warn if any insns have been emitted. */
4220 if (case_stack->data.case_stmt.seenlabel == 0)
4222 rtx insn;
4223 for (insn = case_stack->data.case_stmt.start;
4224 insn;
4225 insn = NEXT_INSN (insn))
4227 if (GET_CODE (insn) == CODE_LABEL)
4228 break;
4229 if (GET_CODE (insn) != NOTE
4230 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4232 warning ("unreachable code at beginning of %s",
4233 case_stack->data.case_stmt.printname);
4234 break;
4238 case_stack->data.case_stmt.seenlabel = 1;
4240 /* Convert VALUEs to type in which the comparisons are nominally done. */
4241 if (value1 == 0) /* Negative infinity. */
4242 value1 = TYPE_MIN_VALUE (index_type);
4243 value1 = (*converter) (nominal_type, value1);
4245 if (value2 == 0) /* Positive infinity. */
4246 value2 = TYPE_MAX_VALUE (index_type);
4247 value2 = (*converter) (nominal_type, value2);
4249 /* Fail if these values are out of range. */
4250 if (! int_fits_type_p (value1, index_type))
4251 return 3;
4253 if (! int_fits_type_p (value2, index_type))
4254 return 3;
4256 /* Fail if the range is empty. */
4257 if (tree_int_cst_lt (value2, value1))
4258 return 4;
4260 return add_case_node (value1, value2, label, duplicate);
4263 /* Do the actual insertion of a case label for pushcase and pushcase_range
4264 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4265 slowdown for large switch statements. */
4267 static int
4268 add_case_node (low, high, label, duplicate)
4269 tree low, high;
4270 tree label;
4271 tree *duplicate;
4273 struct case_node *p, **q, *r;
4275 q = &case_stack->data.case_stmt.case_list;
4276 p = *q;
4278 while (r = *q)
4280 p = r;
4282 /* Keep going past elements distinctly greater than HIGH. */
4283 if (tree_int_cst_lt (high, p->low))
4284 q = &p->left;
4286 /* or distinctly less than LOW. */
4287 else if (tree_int_cst_lt (p->high, low))
4288 q = &p->right;
4290 else
4292 /* We have an overlap; this is an error. */
4293 *duplicate = p->code_label;
4294 return 2;
4298 /* Add this label to the chain, and succeed.
4299 Copy LOW, HIGH so they are on temporary rather than momentary
4300 obstack and will thus survive till the end of the case statement. */
4302 r = (struct case_node *) oballoc (sizeof (struct case_node));
4303 r->low = copy_node (low);
4305 /* If the bounds are equal, turn this into the one-value case. */
4307 if (tree_int_cst_equal (low, high))
4308 r->high = r->low;
4309 else
4311 r->high = copy_node (high);
4312 case_stack->data.case_stmt.num_ranges++;
4315 r->code_label = label;
4316 expand_label (label);
4318 *q = r;
4319 r->parent = p;
4320 r->left = 0;
4321 r->right = 0;
4322 r->balance = 0;
4324 while (p)
4326 struct case_node *s;
4328 if (r == p->left)
4330 int b;
4332 if (! (b = p->balance))
4333 /* Growth propagation from left side. */
4334 p->balance = -1;
4335 else if (b < 0)
4337 if (r->balance < 0)
4339 /* R-Rotation */
4340 if (p->left = s = r->right)
4341 s->parent = p;
4343 r->right = p;
4344 p->balance = 0;
4345 r->balance = 0;
4346 s = p->parent;
4347 p->parent = r;
4349 if (r->parent = s)
4351 if (s->left == p)
4352 s->left = r;
4353 else
4354 s->right = r;
4356 else
4357 case_stack->data.case_stmt.case_list = r;
4359 else
4360 /* r->balance == +1 */
4362 /* LR-Rotation */
4364 int b2;
4365 struct case_node *t = r->right;
4367 if (p->left = s = t->right)
4368 s->parent = p;
4370 t->right = p;
4371 if (r->right = s = t->left)
4372 s->parent = r;
4374 t->left = r;
4375 b = t->balance;
4376 b2 = b < 0;
4377 p->balance = b2;
4378 b2 = -b2 - b;
4379 r->balance = b2;
4380 t->balance = 0;
4381 s = p->parent;
4382 p->parent = t;
4383 r->parent = t;
4385 if (t->parent = s)
4387 if (s->left == p)
4388 s->left = t;
4389 else
4390 s->right = t;
4392 else
4393 case_stack->data.case_stmt.case_list = t;
4395 break;
4398 else
4400 /* p->balance == +1; growth of left side balances the node. */
4401 p->balance = 0;
4402 break;
4405 else
4406 /* r == p->right */
4408 int b;
4410 if (! (b = p->balance))
4411 /* Growth propagation from right side. */
4412 p->balance++;
4413 else if (b > 0)
4415 if (r->balance > 0)
4417 /* L-Rotation */
4419 if (p->right = s = r->left)
4420 s->parent = p;
4422 r->left = p;
4423 p->balance = 0;
4424 r->balance = 0;
4425 s = p->parent;
4426 p->parent = r;
4427 if (r->parent = s)
4429 if (s->left == p)
4430 s->left = r;
4431 else
4432 s->right = r;
4435 else
4436 case_stack->data.case_stmt.case_list = r;
4439 else
4440 /* r->balance == -1 */
4442 /* RL-Rotation */
4443 int b2;
4444 struct case_node *t = r->left;
4446 if (p->right = s = t->left)
4447 s->parent = p;
4449 t->left = p;
4451 if (r->left = s = t->right)
4452 s->parent = r;
4454 t->right = r;
4455 b = t->balance;
4456 b2 = b < 0;
4457 r->balance = b2;
4458 b2 = -b2 - b;
4459 p->balance = b2;
4460 t->balance = 0;
4461 s = p->parent;
4462 p->parent = t;
4463 r->parent = t;
4465 if (t->parent = s)
4467 if (s->left == p)
4468 s->left = t;
4469 else
4470 s->right = t;
4473 else
4474 case_stack->data.case_stmt.case_list = t;
4476 break;
4478 else
4480 /* p->balance == -1; growth of right side balances the node. */
4481 p->balance = 0;
4482 break;
4486 r = p;
4487 p = p->parent;
4490 return 0;
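/* A worked example of the rebalancing (an editor's sketch):
   inserting case values 1, 2 and 3 in that order hangs 2 and then 3
   off the right spine, so the root's balance would exceed +1; the
   L-rotation above then lifts 2 to the root:

	1			  2
	 \			 / \
	  2		=>	1   3
	   \
	    3

   keeping lookups in add_case_node logarithmic no matter how the
   source orders its case labels.  */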
4493 /* Accumulate one case or default label; VALUE is the value of the
4494 case, or nil for a default label. If not currently inside a case,
4495 return 1 and do nothing. If VALUE is a duplicate or overlaps, return
4496 2 and do nothing. If VALUE is out of range, return 3 and do nothing.
4497 Return 0 on success. This function is a leftover from the earlier
4498 bytecode compiler, which was based on gcc 1.37. It should be
4499 merged into pushcase. */
4501 static int
4502 bc_pushcase (value, label)
4503 tree value;
4504 tree label;
4506 struct nesting *thiscase = case_stack;
4507 struct case_node *case_label, *new_label;
4509 if (! thiscase)
4510 return 1;
4512 /* Fail if duplicate, overlap, or out of type range. */
4513 if (value)
4515 value = convert (thiscase->data.case_stmt.nominal_type, value);
4516 if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
4517 return 3;
4519 for (case_label = thiscase->data.case_stmt.case_list;
4520 case_label->left; case_label = case_label->left)
4521 if (! tree_int_cst_lt (case_label->left->high, value))
4522 break;
4524 if (case_label != thiscase->data.case_stmt.case_list
4525 && ! tree_int_cst_lt (case_label->high, value)
4526 || (case_label->left && ! tree_int_cst_lt (value, case_label->left->low)))
4527 return 2;
4529 new_label = (struct case_node *) oballoc (sizeof (struct case_node));
4530 new_label->low = new_label->high = copy_node (value);
4531 new_label->code_label = label;
4532 new_label->left = case_label->left;
4534 case_label->left = new_label;
4535 thiscase->data.case_stmt.num_ranges++;
4537 else
4539 if (thiscase->data.case_stmt.default_label)
4540 return 2;
4541 thiscase->data.case_stmt.default_label = label;
4544 expand_label (label);
4545 return 0;
4548 /* Returns the number of possible values of TYPE.
4549 Returns -1 if the number is unknown or variable.
4550 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4551 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4552 do not increase monotonically (there may be duplicates);
4553 to 1 if the values increase monotonically, but not always by 1;
4554 otherwise sets it to 0. */
4556 HOST_WIDE_INT
4557 all_cases_count (type, spareness)
4558 tree type;
4559 int *spareness;
4561 HOST_WIDE_INT count, count_high = 0;
4562 *spareness = 0;
4564 switch (TREE_CODE (type))
4566 tree t;
4567 case BOOLEAN_TYPE:
4568 count = 2;
4569 break;
4570 case CHAR_TYPE:
4571 count = 1 << BITS_PER_UNIT;
4572 break;
4573 default:
4574 case INTEGER_TYPE:
4575 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4576 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4577 return -1;
4578 else
4580 /* count
4581 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4582 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4583 but with overflow checking. */
4584 tree mint = TYPE_MIN_VALUE (type);
4585 tree maxt = TYPE_MAX_VALUE (type);
4586 HOST_WIDE_INT lo, hi;
4587 neg_double (TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4588 &lo, &hi);
4589 add_double (TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4590 lo, hi, &lo, &hi);
4591 add_double (lo, hi, 1, 0, &lo, &hi);
4592 if (hi != 0 || lo < 0)
4593 return -2;
4594 count = lo;
4596 break;
4597 case ENUMERAL_TYPE:
4598 count = 0;
4599 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4601 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4602 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4603 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4604 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4605 *spareness = 1;
4606 count++;
4608 if (*spareness == 1)
4610 tree prev = TREE_VALUE (TYPE_VALUES (type));
4611 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4613 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4615 *spareness = 2;
4616 break;
4618 prev = TREE_VALUE (t);
4623 return count;
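/* Some sample results (an editor's sketch):

	enum a { p, q, r };		count 3, *spareness 0
	enum b { s = 0, t = 4 };	count 2, *spareness 1 (gap)
	enum c { u = 1, v = 1 };	count 2, *spareness 2 (duplicate)

   while an INTEGER_TYPE yields max - min + 1, or -2 when that
   difference overflows a HOST_WIDE_INT, or -1 when either bound is
   not constant.  */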
4627 #define BITARRAY_TEST(ARRAY, INDEX) \
4628 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4629 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4630 #define BITARRAY_SET(ARRAY, INDEX) \
4631 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4632 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
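/* A minimal usage sketch (editor's example), assuming 8-bit chars:

	unsigned char seen[4];		holds bits 0..31
	bzero ((char *) seen, sizeof seen);
	BITARRAY_SET (seen, 5);
	if (BITARRAY_TEST (seen, 5))
	  ...

   Each bit records whether the case value at that offset has been
   matched.  */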
4634 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4635 with the case values we have seen, assuming the case expression
4636 has the given TYPE.
4637 SPARSENESS is as determined by all_cases_count.
4639 The time needed is proportional to COUNT, unless
4640 SPARSENESS is 2, in which case quadratic time is needed. */
4642 void
4643 mark_seen_cases (type, cases_seen, count, sparseness)
4644 tree type;
4645 unsigned char *cases_seen;
4646 long count;
4647 int sparseness;
4649 long i;
4651 tree next_node_to_try = NULL_TREE;
4652 long next_node_offset = 0;
4654 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4655 tree val = make_node (INTEGER_CST);
4656 TREE_TYPE (val) = type;
4657 if (! root)
4658 ; /* Do nothing */
4659 else if (sparseness == 2)
4661 tree t;
4662 HOST_WIDE_INT xlo;
4664 /* This less efficient loop is only needed to handle
4665 duplicate case values (multiple enum constants
4666 with the same value). */
4667 TREE_TYPE (val) = TREE_TYPE (root->low);
4668 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4669 t = TREE_CHAIN (t), xlo++)
4671 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4672 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4673 n = root;
4676 /* Keep going past elements distinctly greater than VAL. */
4677 if (tree_int_cst_lt (val, n->low))
4678 n = n->left;
4680 /* or distinctly less than VAL. */
4681 else if (tree_int_cst_lt (n->high, val))
4682 n = n->right;
4684 else
4686 /* We have found a matching range. */
4687 BITARRAY_SET (cases_seen, xlo);
4688 break;
4691 while (n);
4694 else
4696 if (root->left)
4697 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4698 for (n = root; n; n = n->right)
4700 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4701 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4702 while ( ! tree_int_cst_lt (n->high, val))
4704 /* Calculate (into xlo) the "offset" of the integer (val).
4705 The element with lowest value has offset 0, the next smallest
4706 element has offset 1, etc. */
4708 HOST_WIDE_INT xlo, xhi;
4709 tree t;
4710 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4712 /* The TYPE_VALUES will be in increasing order, so
4713 start searching where we last ended. */
4714 t = next_node_to_try;
4715 xlo = next_node_offset;
4716 xhi = 0;
4717 for (;;)
4719 if (t == NULL_TREE)
4721 t = TYPE_VALUES (type);
4722 xlo = 0;
4724 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4726 next_node_to_try = TREE_CHAIN (t);
4727 next_node_offset = xlo + 1;
4728 break;
4730 xlo++;
4731 t = TREE_CHAIN (t);
4732 if (t == next_node_to_try)
4734 xlo = -1;
4735 break;
4739 else
4741 t = TYPE_MIN_VALUE (type);
4742 if (t)
4743 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4744 &xlo, &xhi);
4745 else
4746 xlo = xhi = 0;
4747 add_double (xlo, xhi,
4748 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4749 &xlo, &xhi);
4752 if (xhi == 0 && xlo >= 0 && xlo < count)
4753 BITARRAY_SET (cases_seen, xlo);
4754 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4755 1, 0,
4756 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4762 /* Called when the index of a switch statement is an enumerated type
4763 and there is no default label.
4765 Checks that all enumeration literals are covered by the case
4766 expressions of a switch. Also, warn if there are any extra
4767 switch cases that are *not* elements of the enumerated type.
4769 If all enumeration literals were covered by the case expressions,
4770 turn one of the expressions into the default expression since it should
4771 not be possible to fall through such a switch. */
4773 void
4774 check_for_full_enumeration_handling (type)
4775 tree type;
4777 register struct case_node *n;
4778 register struct case_node **l;
4779 register tree chain;
4780 int all_values = 1;
4782 /* True iff the selector type is a numbered set mode. */
4783 int sparseness = 0;
4785 /* The number of possible selector values. */
4786 HOST_WIDE_INT size;
4788 /* For each possible selector value, a one iff it has been matched
4789 by a case value alternative. */
4790 unsigned char *cases_seen;
4792 /* The allocated size of cases_seen, in chars. */
4793 long bytes_needed;
4794 tree t;
4796 if (output_bytecode)
4798 bc_check_for_full_enumeration_handling (type);
4799 return;
4802 if (! warn_switch)
4803 return;
4805 size = all_cases_count (type, &sparseness);
4806 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
4808 if (size > 0 && size < 600000
4809 /* We deliberately use malloc here - not xmalloc. */
4810 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
4812 long i;
4813 tree v = TYPE_VALUES (type);
4814 bzero (cases_seen, bytes_needed);
4816 /* The time complexity of this code is normally O(N), where
4817 N is the number of members in the enumerated type.
4818 However, if type is an ENUMERAL_TYPE whose values do not
4819 increase monotonically, O(N*log(N)) time may be needed. */
4821 mark_seen_cases (type, cases_seen, size, sparseness);
4823 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4825 if (BITARRAY_TEST(cases_seen, i) == 0)
4826 warning ("enumeration value `%s' not handled in switch",
4827 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
4830 free (cases_seen);
4833 /* Now we go the other way around; we warn if there are case
4834 expressions that don't correspond to enumerators. This can
4835 occur since C and C++ don't enforce type-checking of
4836 assignments to enumeration variables. */
4838 if (case_stack->data.case_stmt.case_list
4839 && case_stack->data.case_stmt.case_list->left)
4840 case_stack->data.case_stmt.case_list
4841 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
4842 if (warn_switch)
4843 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4845 for (chain = TYPE_VALUES (type);
4846 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4847 chain = TREE_CHAIN (chain))
4850 if (!chain)
4852 if (TYPE_NAME (type) == 0)
4853 warning ("case value `%d' not in enumerated type",
4854 TREE_INT_CST_LOW (n->low));
4855 else
4856 warning ("case value `%d' not in enumerated type `%s'",
4857 TREE_INT_CST_LOW (n->low),
4858 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4859 == IDENTIFIER_NODE)
4860 ? TYPE_NAME (type)
4861 : DECL_NAME (TYPE_NAME (type))));
4863 if (!tree_int_cst_equal (n->low, n->high))
4865 for (chain = TYPE_VALUES (type);
4866 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4867 chain = TREE_CHAIN (chain))
4870 if (!chain)
4872 if (TYPE_NAME (type) == 0)
4873 warning ("case value `%d' not in enumerated type",
4874 TREE_INT_CST_LOW (n->high));
4875 else
4876 warning ("case value `%d' not in enumerated type `%s'",
4877 TREE_INT_CST_LOW (n->high),
4878 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4879 == IDENTIFIER_NODE)
4880 ? TYPE_NAME (type)
4881 : DECL_NAME (TYPE_NAME (type))));
4886 #if 0
4887 /* ??? This optimization is disabled because it causes valid programs to
4888 fail. ANSI C does not guarantee that an expression with enum type
4889 will have a value that is the same as one of the enumeration literals. */
4891 /* If all values were found as case labels, make one of them the default
4892 label. Thus, this switch will never fall through. We arbitrarily pick
4893 the last one to make the default since this is likely the most
4894 efficient choice. */
4896 if (all_values)
4898 for (l = &case_stack->data.case_stmt.case_list;
4899 (*l)->right != 0;
4900 l = &(*l)->right)
4903 case_stack->data.case_stmt.default_label = (*l)->code_label;
4904 *l = 0;
4906 #endif /* 0 */
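/* As an illustration (an editor's sketch), with -Wswitch and

	enum color { RED, GREEN, BLUE };

	switch (c)		c of type enum color, no default
	  {
	  case RED: ...
	  case 7: ...
	  }

   the first loop warns that GREEN and BLUE are not handled, and
   the second loop warns that case value `7' is not in enumerated
   type `color'.  */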
4910 /* Check that all enumeration literals are covered by the case
4911 expressions of a switch. Also warn if there are any cases
4912 that are not elements of the enumerated type. */
4914 static void
4915 bc_check_for_full_enumeration_handling (type)
4916 tree type;
4918 struct nesting *thiscase = case_stack;
4919 struct case_node *c;
4920 tree e;
4922 /* Check for enums not handled. */
4923 for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
4925 for (c = thiscase->data.case_stmt.case_list->left;
4926 c && tree_int_cst_lt (c->high, TREE_VALUE (e));
4927 c = c->left)
4929 if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
4930 warning ("enumerated value `%s' not handled in switch",
4931 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
4934 /* Check for cases not in the enumeration. */
4935 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4937 for (e = TYPE_VALUES (type);
4938 e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
4939 e = TREE_CHAIN (e))
4941 if (! e)
4942 warning ("case value `%d' not in enumerated type `%s'",
4943 TREE_INT_CST_LOW (c->low),
4944 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
4945 ? TYPE_NAME (type)
4946 : DECL_NAME (TYPE_NAME (type))));
4950 /* Terminate a case (Pascal) or switch (C) statement
4951 in which ORIG_INDEX is the expression to be tested.
4952 Generate the code to test it and jump to the right place. */
4954 void
4955 expand_end_case (orig_index)
4956 tree orig_index;
4958 tree minval, maxval, range, orig_minval;
4959 rtx default_label = 0;
4960 register struct case_node *n;
4961 int count;
4962 rtx index;
4963 rtx table_label;
4964 int ncases;
4965 rtx *labelvec;
4966 register int i;
4967 rtx before_case;
4968 register struct nesting *thiscase = case_stack;
4969 tree index_expr, index_type;
4970 int unsignedp;
4972 if (output_bytecode)
4974 bc_expand_end_case (orig_index);
4975 return;
4978 table_label = gen_label_rtx ();
4979 index_expr = thiscase->data.case_stmt.index_expr;
4980 index_type = TREE_TYPE (index_expr);
4981 unsignedp = TREE_UNSIGNED (index_type);
4983 do_pending_stack_adjust ();
4985 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4986 if (index_type != error_mark_node)
4988 /* If switch expression was an enumerated type, check that all
4989 enumeration literals are covered by the cases.
4990 No sense trying this if there's a default case, however. */
4992 if (!thiscase->data.case_stmt.default_label
4993 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4994 && TREE_CODE (index_expr) != INTEGER_CST)
4995 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4997 /* If this is the first label, warn if any insns have been emitted. */
4998 if (thiscase->data.case_stmt.seenlabel == 0)
5000 rtx insn;
5001 for (insn = get_last_insn ();
5002 insn != case_stack->data.case_stmt.start;
5003 insn = PREV_INSN (insn))
5004 if (GET_CODE (insn) != NOTE
5005 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
5007 warning ("unreachable code at beginning of %s",
5008 case_stack->data.case_stmt.printname);
5009 break;
5013 /* If we don't have a default-label, create one here,
5014 after the body of the switch. */
5015 if (thiscase->data.case_stmt.default_label == 0)
5017 thiscase->data.case_stmt.default_label
5018 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5019 expand_label (thiscase->data.case_stmt.default_label);
5021 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5023 before_case = get_last_insn ();
5025 if (thiscase->data.case_stmt.case_list
5026 && thiscase->data.case_stmt.case_list->left)
5027 thiscase->data.case_stmt.case_list
5028 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5030 /* Simplify the case-list before we count it. */
5031 group_case_nodes (thiscase->data.case_stmt.case_list);
5033 /* Get upper and lower bounds of case values.
5034 Also convert all the case values to the index expr's data type. */
5036 count = 0;
5037 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5039 /* Check low and high label values are integers. */
5040 if (TREE_CODE (n->low) != INTEGER_CST)
5041 abort ();
5042 if (TREE_CODE (n->high) != INTEGER_CST)
5043 abort ();
5045 n->low = convert (index_type, n->low);
5046 n->high = convert (index_type, n->high);
5048 /* Count the elements and track the largest and smallest
5049 of them (treating them as signed even if they are not). */
5050 if (count++ == 0)
5052 minval = n->low;
5053 maxval = n->high;
5055 else
5057 if (INT_CST_LT (n->low, minval))
5058 minval = n->low;
5059 if (INT_CST_LT (maxval, n->high))
5060 maxval = n->high;
5062 /* A range counts double, since it requires two compares. */
5063 if (! tree_int_cst_equal (n->low, n->high))
5064 count++;
5067 orig_minval = minval;
5069 /* Compute span of values. */
5070 if (count != 0)
5071 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5073 if (count == 0)
5075 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5076 emit_queue ();
5077 emit_jump (default_label);
5080 /* If range of values is much bigger than number of values,
5081 make a sequence of conditional branches instead of a dispatch.
5082 If the switch-index is a constant, do it this way
5083 because we can optimize it. */
5085 #ifndef CASE_VALUES_THRESHOLD
5086 #ifdef HAVE_casesi
5087 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5088 #else
5089 /* If machine does not have a case insn that compares the
5090 bounds, this means extra overhead for dispatch tables
5091 which raises the threshold for using them. */
5092 #define CASE_VALUES_THRESHOLD 5
5093 #endif /* HAVE_casesi */
5094 #endif /* CASE_VALUES_THRESHOLD */
5096 else if (TREE_INT_CST_HIGH (range) != 0
5097 || count < CASE_VALUES_THRESHOLD
5098 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
5099 > 10 * count)
5100 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5101 || flag_pic
5102 #endif
5103 || TREE_CODE (index_expr) == INTEGER_CST
5104 /* These will reduce to a constant. */
5105 || (TREE_CODE (index_expr) == CALL_EXPR
5106 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5107 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5108 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5109 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5110 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5112 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5114 /* If the index is a short or char for which we do not have
5115 an insn to handle comparisons directly, convert it to
5116 a full integer now, rather than letting each comparison
5117 generate the conversion. */
5119 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5120 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
5121 == CODE_FOR_nothing))
5123 enum machine_mode wider_mode;
5124 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5125 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5126 if (cmp_optab->handlers[(int) wider_mode].insn_code
5127 != CODE_FOR_nothing)
5129 index = convert_to_mode (wider_mode, index, unsignedp);
5130 break;
5134 emit_queue ();
5135 do_pending_stack_adjust ();
5137 index = protect_from_queue (index, 0);
5138 if (GET_CODE (index) == MEM)
5139 index = copy_to_reg (index);
5140 if (GET_CODE (index) == CONST_INT
5141 || TREE_CODE (index_expr) == INTEGER_CST)
5143 /* Make a tree node with the proper constant value
5144 if we don't already have one. */
5145 if (TREE_CODE (index_expr) != INTEGER_CST)
5147 index_expr
5148 = build_int_2 (INTVAL (index),
5149 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5150 index_expr = convert (index_type, index_expr);
5153 /* For constant index expressions we need only
5154 issue an unconditional branch to the appropriate
5155 target code. The job of removing any unreachable
5156 code is left to the optimisation phase if the
5157 "-O" option is specified. */
5158 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5159 if (! tree_int_cst_lt (index_expr, n->low)
5160 && ! tree_int_cst_lt (n->high, index_expr))
5161 break;
5163 if (n)
5164 emit_jump (label_rtx (n->code_label));
5165 else
5166 emit_jump (default_label);
5168 else
5170 /* If the index expression is not constant we generate
5171 a binary decision tree to select the appropriate
5172 target code. This is done as follows:
5174 The list of cases is rearranged into a binary tree,
5175 nearly optimal assuming equal probability for each case.
5177 The tree is transformed into RTL, eliminating
5178 redundant test conditions at the same time.
5180 If program flow could reach the end of the
5181 decision tree an unconditional jump to the
5182 default code is emitted. */
5184 use_cost_table
5185 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5186 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5187 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5188 NULL_PTR);
5189 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5190 default_label, index_type);
5191 emit_jump_if_reachable (default_label);
5194 else
5196 int win = 0;
5197 #ifdef HAVE_casesi
5198 if (HAVE_casesi)
5200 enum machine_mode index_mode = SImode;
5201 int index_bits = GET_MODE_BITSIZE (index_mode);
5202 rtx op1, op2;
5203 enum machine_mode op_mode;
5205 /* Convert the index to SImode. */
5206 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5207 > GET_MODE_BITSIZE (index_mode))
5209 enum machine_mode omode = TYPE_MODE (index_type);
5210 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5212 /* We must handle the endpoints in the original mode. */
5213 index_expr = build (MINUS_EXPR, index_type,
5214 index_expr, minval);
5215 minval = integer_zero_node;
5216 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5217 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
5218 emit_jump_insn (gen_bltu (default_label));
5219 /* Now we can safely truncate. */
5220 index = convert_to_mode (index_mode, index, 0);
5222 else
5224 if (TYPE_MODE (index_type) != index_mode)
5226 index_expr = convert (type_for_size (index_bits, 0),
5227 index_expr);
5228 index_type = TREE_TYPE (index_expr);
5231 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5233 emit_queue ();
5234 index = protect_from_queue (index, 0);
5235 do_pending_stack_adjust ();
5237 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
5238 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
5239 (index, op_mode))
5240 index = copy_to_mode_reg (op_mode, index);
5242 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5244 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
5245 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
5246 (op1, op_mode))
5247 op1 = copy_to_mode_reg (op_mode, op1);
5249 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5251 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
5252 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
5253 (op2, op_mode))
5254 op2 = copy_to_mode_reg (op_mode, op2);
5256 emit_jump_insn (gen_casesi (index, op1, op2,
5257 table_label, default_label));
5258 win = 1;
5260 #endif
5261 #ifdef HAVE_tablejump
5262 if (! win && HAVE_tablejump)
5264 index_expr = convert (thiscase->data.case_stmt.nominal_type,
5265 fold (build (MINUS_EXPR, index_type,
5266 index_expr, minval)));
5267 index_type = TREE_TYPE (index_expr);
5268 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5269 emit_queue ();
5270 index = protect_from_queue (index, 0);
5271 do_pending_stack_adjust ();
5273 do_tablejump (index, TYPE_MODE (index_type),
5274 expand_expr (range, NULL_RTX, VOIDmode, 0),
5275 table_label, default_label);
5276 win = 1;
5278 #endif
5279 if (! win)
5280 abort ();
5282 /* Get table of labels to jump to, in order of case index. */
5284 ncases = TREE_INT_CST_LOW (range) + 1;
5285 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5286 bzero ((char *) labelvec, ncases * sizeof (rtx));
5288 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5290 register HOST_WIDE_INT i
5291 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5293 while (1)
5295 labelvec[i]
5296 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
5297 if (i + TREE_INT_CST_LOW (orig_minval)
5298 == TREE_INT_CST_LOW (n->high))
5299 break;
5300 i++;
5304 /* Fill in the gaps with the default. */
5305 for (i = 0; i < ncases; i++)
5306 if (labelvec[i] == 0)
5307 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
5309 /* Output the table */
5310 emit_label (table_label);
5312 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
5313 were an expression, instead of an #ifdef/#ifndef. */
5314 if (
5315 #ifdef CASE_VECTOR_PC_RELATIVE
5316 1 ||
5317 #endif
5318 flag_pic)
5319 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
5320 gen_rtx (LABEL_REF, Pmode, table_label),
5321 gen_rtvec_v (ncases, labelvec)));
5322 else
5323 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
5324 gen_rtvec_v (ncases, labelvec)));
5326 /* If the case insn drops through the table,
5327 after the table we must jump to the default-label.
5328 Otherwise record no drop-through after the table. */
5329 #ifdef CASE_DROPS_THROUGH
5330 emit_jump (default_label);
5331 #else
5332 emit_barrier ();
5333 #endif
5336 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5337 reorder_insns (before_case, get_last_insn (),
5338 thiscase->data.case_stmt.start);
5341 if (thiscase->exit_label)
5342 emit_label (thiscase->exit_label);
5344 POPSTACK (case_stack);
5346 free_temp_slots ();
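/* To make the choice of strategy concrete (an editor's sketch, with
   hypothetical handlers a () through e ()): a dense switch such as

	switch (x) { case 1: a (); break; case 2: b (); break;
		     case 3: c (); break; case 4: d (); break;
		     case 5: e (); break; }

   normally dispatches through casesi or tablejump, whereas

	switch (x) { case 1: a (); break; case 1000: b (); break;
		     case 1000000: c (); break; }

   has a span more than ten times the number of case values and so
   falls back to the balanced tree of comparisons, as does any
   switch with fewer than CASE_VALUES_THRESHOLD values.  */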
5349 /* Convert the tree NODE into a list linked by the right field, with the left
5350 field zeroed. RIGHT is used for recursion; it is a list to be placed
5351 rightmost in the resulting list. */
5353 static struct case_node *
5354 case_tree2list (node, right)
5355 struct case_node *node, *right;
5357 struct case_node *left;
5359 if (node->right)
5360 right = case_tree2list (node->right, right);
5362 node->right = right;
5363 if (left = node->left)
5365 node->left = 0;
5366 return case_tree2list (left, node);
5369 return node;
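/* For example (an editor's sketch), the balanced tree

	  2
	 / \
	1   3

   comes back as the chain 1 -> 2 -> 3 linked through the right
   fields, with every left field zeroed; an in-order walk of the
   tree and a walk of the resulting list visit the case nodes in
   the same order.  */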
5372 /* Terminate a case statement. EXPR is the original index
5373 expression. */
5375 static void
5376 bc_expand_end_case (expr)
5377 tree expr;
5379 struct nesting *thiscase = case_stack;
5380 enum bytecode_opcode opcode;
5381 struct bc_label *jump_label;
5382 struct case_node *c;
5384 bc_emit_bytecode (jump);
5385 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
5387 #ifdef DEBUG_PRINT_CODE
5388 fputc ('\n', stderr);
5389 #endif
5391 /* Now that the size of the jump table is known, emit the actual
5392 indexed jump instruction. */
5393 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
5395 opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
5396 ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
5397 : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;
5399 bc_emit_bytecode (opcode);
5401 /* Now emit the case instruction's literal arguments, in order.
5402 In addition to the value on the stack, it uses:
5403 1. The address of the jump table.
5404 2. The size of the jump table.
5405 3. The default label. */
5407 jump_label = bc_get_bytecode_label ();
5408 bc_emit_bytecode_labelref (jump_label);
5409 bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
5410 sizeof thiscase->data.case_stmt.num_ranges);
5412 if (thiscase->data.case_stmt.default_label)
5413 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)));
5414 else
5415 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
5417 /* Output the jump table. */
5419 bc_align_bytecode (3 /* PTR_ALIGN */);
5420 bc_emit_bytecode_labeldef (jump_label);
5422 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
5423 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5425 opcode = TREE_INT_CST_LOW (c->low);
5426 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5428 opcode = TREE_INT_CST_LOW (c->high);
5429 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5431 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
5433 else
5434 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
5435 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5437 bc_emit_bytecode_DI_const (c->low);
5438 bc_emit_bytecode_DI_const (c->high);
5440 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
5442 else
5443 /* Bad mode */
5444 abort ();
5447 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));
5449 /* Possibly issue enumeration warnings. */
5451 if (!thiscase->data.case_stmt.default_label
5452 && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
5453 && TREE_CODE (expr) != INTEGER_CST
5454 && warn_switch)
5455 check_for_full_enumeration_handling (TREE_TYPE (expr));
5458 #ifdef DEBUG_PRINT_CODE
5459 fputc ('\n', stderr);
5460 #endif
5462 POPSTACK (case_stack);
5466 /* Return unique bytecode ID. */
5468 int
5469 bc_new_uid ()
5471 static int bc_uid = 0;
5473 return (++bc_uid);
5476 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5478 static void
5479 do_jump_if_equal (op1, op2, label, unsignedp)
5480 rtx op1, op2, label;
5481 int unsignedp;
5483 if (GET_CODE (op1) == CONST_INT
5484 && GET_CODE (op2) == CONST_INT)
5486 if (INTVAL (op1) == INTVAL (op2))
5487 emit_jump (label);
5489 else
5491 enum machine_mode mode = GET_MODE (op1);
5492 if (mode == VOIDmode)
5493 mode = GET_MODE (op2);
5494 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
5495 emit_jump_insn (gen_beq (label));
5499 /* Not all case values are encountered equally. This function
5500 uses a heuristic to weight case labels, in cases where that
5501 looks like a reasonable thing to do.
5503 Right now, all we try to guess is text, and we establish the
5504 following weights:
5506 chars above space: 16
5507 digits: 16
5508 default: 12
5509 space, punct: 8
5510 tab: 4
5511 newline: 2
5512 other "\" chars: 1
5513 remaining chars: 0
5515 If we find any cases in the switch that are not either -1 or in the range
5516 of valid ASCII characters, or are control characters other than those
5517 commonly used with "\", don't treat this switch as scanning text.
5519 Return 1 if these nodes are suitable for cost estimation, otherwise
5520 return 0. */
static int
estimate_case_costs (node)
     case_node_ptr node;
{
  tree min_ascii = build_int_2 (-1, -1);
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (cost_table == NULL)
    {
      cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
      bzero ((char *) (cost_table - 1), 129 * sizeof (short));

      for (i = 0; i < 128; i++)
	{
	  if (isalnum (i))
	    cost_table[i] = 16;
	  else if (ispunct (i))
	    cost_table[i] = 8;
	  else if (iscntrl (i))
	    cost_table[i] = -1;
	}

      cost_table[' '] = 8;
      cost_table['\t'] = 4;
      cost_table['\0'] = 4;
      cost_table['\n'] = 2;
      cost_table['\f'] = 1;
      cost_table['\v'] = 1;
      cost_table['\b'] = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
	return 0;

      for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
	if (cost_table[i] < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}
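
/* A worked example of the heuristic (illustrative values only): for a
   switch whose cases are 'a', '0', ' ' and '\n', the table yields the
   weights 16, 16, 8 and 2 respectively, so balance_case_nodes below
   splits the list by weight rather than by node count and gives the
   likelier alphanumeric cases a shallower average depth.  A case value
   such as 1 (control-A, not one of the common "\" escapes) keeps the
   cost -1 assigned by the iscntrl pass, so this function returns 0 and
   the cost table is not used for that switch.  */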
/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */
static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      rtx lb2;
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
	     /* Do they jump to the same place?  */
	     && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
		 || (lb != 0 && lb2 != 0
		     && simplejump_p (lb)
		     && simplejump_p (lb2)
		     && rtx_equal_p (SET_SRC (PATTERN (lb)),
				     SET_SRC (PATTERN (lb2)))))
	     /* Are their ranges consecutive?  */
	     && tree_int_cst_equal (np->low,
				    fold (build (PLUS_EXPR,
						 TREE_TYPE (node->high),
						 node->high,
						 integer_one_node)))
	     /* An overflow is not consecutive.  */
	     && tree_int_cst_lt (node->high,
				 fold (build (PLUS_EXPR,
					      TREE_TYPE (node->high),
					      node->high,
					      integer_one_node))))
	node->high = np->high;

      /* NP is the first node after NODE which can't be grouped with it.
	 Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}
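
/* For illustration (hypothetical source, not part of the algorithm):
   given

	switch (c)
	  {
	  case 1: case 2: case 3:  do_a ();  break;
	  case 4:		   do_b ();  break;
	  }

   the entries for 1, 2 and 3 all jump to the same place and their
   values are consecutive, so the inner loop above collapses them into
   a single case node covering 1..3; the entry for 4 targets different
   code and is left alone.  */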
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */
static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += cost_table[TREE_INT_CST_LOW (np->high)];
	    }

	  if (use_cost_table)
	    cost += cost_table[TREE_INT_CST_LOW (np->low)];

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total cost.
		 Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
		  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
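
/* An illustrative trace, assuming the cost table is not in use: for an
   ordered list of seven single-valued nodes 1,2,...,7 the midpoint
   computation i = (7 + 0 + 1) / 2 = 4 selects the node for 4 as the
   pivot, leaving 1,2,3 and 5,6,7 to be balanced recursively, so the
   finished tree is three levels deep.  A range node such as 10..20
   would count as 2 when locating the midpoint, reflecting that a range
   needs two comparisons where a single value needs one.  */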
/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed, so it is
   known that if a parent node has already tested the value one less
   than this node's low bound, the current node is bounded at its
   lower span and the test would be redundant.  */
static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}
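
/* Example, echoing the one in the comment before emit_case_nodes
   below: if this node covers the value 51 and some parent tested 50,
   the generated tree has already branched on the comparison with 50
   before control can reach this node's code, so the loop above finds
   a parent whose high bound equals 51 - 1 and reports the lower-bound
   test redundant.  node_has_high_bound below is the mirror image,
   looking for a parent that tests our value + 1.  */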
/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed, so it is
   known that if a parent node has already tested the value one greater
   than this node's high bound, the current node is bounded at its
   upper span and the test would be redundant.  */
static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
			       node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}
/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
	  && node_has_high_bound (node, index_type));
}
/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}
/* Emit step-by-step code to select a case for the value of INDEX.
   The decision tree generated here follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out-of-bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */
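
/* To make the output concrete, here is an illustrative sketch (pseudo
   code, not actual emitted rtl) of the tree for a balanced three-node
   switch on the values 1, 2 and 3, with 2 at the root and neither
   subtree bounded:

	if (index == 2) goto L2;
	if (index > 2) goto test_label;
	if (index == 1) goto L1;
	goto default_label;
     test_label:
	if (index == 3) goto L3;
	(falls through toward the default label)

   The labels L1, L2, L3 and test_label are hypothetical; each
   comparison line stands for an emit_cmp_insn followed by one of the
   gen_b* patterns selected at the top of the function below.  */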
static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_function ();
  rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
  rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
  rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
  rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
  enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			label_rtx (node->code_label), unsignedp);
      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);

	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label
		= build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      emit_jump_if_reachable (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}
      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left, so we issue a conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if it would only guard a
	     single-valued right child; it costs too much space to save so
	     little time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 LT, NULL_RTX, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_blt_pat) (default_label));
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->right->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->right->code_label), unsignedp);
	}
      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
	 of the condition here, but they didn't work
	 and I don't understand what the idea was.  -- rms.  */
	  /* If our "most probable entry" is less probable
	     than the default label, emit a jump to
	     the default label using condition codes
	     already lying around.  With no right branch,
	     a branch-greater-than will get us to the default
	     label correctly.  */
	  if (use_cost_table
	      && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
	    ;
#endif /* 0 */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 GT, NULL_RTX, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_bgt_pat) (default_label));
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers greater than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->left->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 GT, NULL_RTX, mode, unsignedp, 0);

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			 GE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump_if_reachable (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 LE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			 GE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */

	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}
/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.  */

static tree *block_vector;

void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());
}
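
/* A sketch of the intended calling sequence; the caller is the loop
   unroller, and the surrounding control flow here is an assumption:

     find_loop_tree_blocks ();    (record the current BLOCK structure)
     ... unroll loops, copying insns and their block notes ...
     unroll_block_trees ();       (rebuild BLOCKs to match the copies)

   find_loop_tree_blocks must run before any insns are duplicated, so
   that block_vector still describes the original block structure.  */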