/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-6, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
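/* For illustration only (the exact sequence depends on the front end):
   a parser expanding `if (c) s1; else s2;' would typically call

	expand_start_cond (c, 0);
	... expand s1 ...
	expand_start_else ();
	... expand s2 ...
	expand_end_cond ();

   using the functions defined below.  */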
#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"

#include "bytecode.h"
#include "bc-typecd.h"
#include "bc-opcode.h"
#include "bc-optab.h"
#include "bc-emit.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;
/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;
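/* For example, in the GNU C statement expression
   `({ int i = foo (); i + 1; })', the value of the last expr-stmt
   `i + 1' is the value of the whole construct, so it must be computed
   even though an ordinary statement's value could be discarded.  */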
/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;
/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;
/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

/* Stack allocation level in which temporaries for TARGET_EXPRs live.  */
extern int target_temp_slot_level;

extern int temp_slot_level;

/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
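/* For example (single-value and GNU C range labels), `case 1:' yields a
   case_node with low == high == 1, while `case 3 ... 5:' yields one with
   low == 3 and high == 5.  */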
struct case_node
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;                 /* Lowest index value for this label */
  tree high;                /* Highest index value for this label */
  tree code_label;          /* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
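/* For example (illustrative), a loop started with a nonzero exit flag,
   as in `expand_start_loop (1)', is given an exit_label and is therefore
   visible to `break'; expand_exit_loop jumps to that label.  */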
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } cond;
      /* For loops.  */
      struct
        {
          /* Label at the top of the loop; place to loop back to.  */
          rtx start_label;
          /* Label at the end of the whole construct.  */
          rtx end_label;
          /* Label before a jump that branches to the end of the whole
             construct.  This is where destructors go if any.  */
          rtx alt_end_label;
          /* Label for `continue' statement to jump to;
             this is in front of the stepper of the loop.  */
          rtx continue_label;
        } loop;
      /* For variable binding contours.  */
      struct
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  Complemented by
             bc_stack_level (see below) when generating bytecodes.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0,
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Number of function calls seen, as of start of this block.  */
          int function_call_count;
          /* Bytecode specific: stack level to restore stack to on exit.  */
          int bc_stack_level;
          /* Nonzero if this is associated with an EH region.  */
          int exception_region;
          /* The saved target_temp_slot_level from our outer block.
             We may reset target_temp_slot_level to be the level of
             this block; if that is done, target_temp_slot_level
             reverts to the saved target_temp_slot_level at the very
             end of the block.  */
          int target_temp_slot_level;
          /* True if we are currently emitting insns in an area of
             output code that is controlled by a conditional
             expression.  This is used by the cleanup handling code to
             generate conditional cleanup actions.  */
          int conditional_code;
          /* A place to move the start of the exception region for any
             of the conditional cleanups, must be at the end or after
             the start of the last unconditional cleanup, and before any
             conditional branch points.  */
          rtx last_unconditional_cleanup;
          /* When in a conditional context, this is the specific
             cleanup list associated with last_unconditional_cleanup,
             where we place the conditionalized cleanups.  */
          tree *cleanup_ptr;
        } block;
      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */
      struct
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* For bytecodes, the case table is in-lined right in the code.
             A label is needed for skipping over this block.  It is only
             used when generating bytecodes.  */
          rtx skip_label;
          /* A list of case labels; it is first built as an AVL tree.
             During expand_end_case, this is converted to a list, and may be
             rearranged into a nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Number of range exprs in case statement.  */
          int num_ranges;
          /* Name of this kind of statement, for warnings.  */
          char *printname;
          /* Nonzero if a case label has been seen in this case stmt.  */
          char seenlabel;
        } case_stmt;
    } data;
};
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)                                  \
do { struct nesting *target = STACK;                     \
     struct nesting *this;                               \
     do { this = nesting_stack;                          \
          if (loop_stack == this)                        \
            loop_stack = loop_stack->next;               \
          if (cond_stack == this)                        \
            cond_stack = cond_stack->next;               \
          if (block_stack == this)                       \
            block_stack = block_stack->next;             \
          if (stack_block_stack == this)                 \
            stack_block_stack = stack_block_stack->next; \
          if (case_stack == this)                        \
            case_stack = case_stack->next;               \
          nesting_depth = nesting_stack->depth - 1;      \
          nesting_stack = this->all;                     \
          obstack_free (&stmt_obstack, this); }          \
     while (this != target); } while (0)
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
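/* For illustration, in

	{ char buf[n]; ... goto out; ... }
     out: ;

   the binding contour for BUF restores a stack level on exit, and `out'
   is not yet defined when the goto is expanded, so expand_goto records
   a fixup here; the proper code is inserted once `out' has been defined
   and the contour is exited.  */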
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;

  /* Bytecode specific members follow */

  /* The label that this jump is jumping to, or 0 for break, continue
     or return.  */
  struct bc_label *bc_target;

  /* The label we use for the fixup patch */
  struct bc_label *label;

  /* True (non-0) if fixup has been handled */
  int bc_handled:1;

  /* Like stack_level above, except refers to the interpreter stack */
  int bc_stack_level;
};

static struct goto_fixup *goto_fixup_chain;
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};

/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;
static void expand_goto_internal          PROTO((tree, rtx, rtx));
static void bc_expand_goto_internal       PROTO((enum bytecode_opcode,
                                                 struct bc_label *, tree));
static int expand_fixup                   PROTO((tree, rtx, rtx));
static void bc_expand_fixup               PROTO((enum bytecode_opcode,
                                                 struct bc_label *, int));
static void fixup_gotos                   PROTO((struct nesting *, rtx, tree,
                                                 rtx, int));
static void bc_fixup_gotos                PROTO((struct nesting *, int, tree,
                                                 rtx, int));
static void bc_expand_start_cond          PROTO((tree, int));
static void bc_expand_end_cond            PROTO((void));
static void bc_expand_start_else          PROTO((void));
static void bc_expand_end_loop            PROTO((void));
static void bc_expand_end_bindings        PROTO((tree, int, int));
static void bc_expand_decl                PROTO((tree, tree));
static void bc_expand_variable_local_init PROTO((tree));
static void bc_expand_decl_init           PROTO((tree));
static void expand_null_return_1          PROTO((rtx, int));
static void expand_value_return           PROTO((rtx));
static int tail_recursion_args            PROTO((tree, tree));
static void expand_cleanups               PROTO((tree, tree, int, int));
static void bc_expand_start_case          PROTO((struct nesting *, tree,
                                                 tree, char *));
static int bc_pushcase                    PROTO((tree, tree));
static void bc_check_for_full_enumeration_handling PROTO((tree));
static void bc_expand_end_case            PROTO((tree));
static void do_jump_if_equal              PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs            PROTO((case_node_ptr));
static void group_case_nodes              PROTO((case_node_ptr));
static void balance_case_nodes            PROTO((case_node_ptr *,
                                                 case_node_ptr));
static int node_has_low_bound             PROTO((case_node_ptr, tree));
static int node_has_high_bound            PROTO((case_node_ptr, tree));
static int node_is_bounded                PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable        PROTO((rtx));
static void emit_case_nodes               PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node                  PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list   PROTO((case_node *, case_node *));

extern rtx bc_allocate_local ();
extern rtx bc_allocate_variable_array ();
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
  init_eh ();
}
void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;

  init_eh_for_function ();
}
void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
  save_eh_status (p);
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
  restore_eh_status (p);
}
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  if (!output_bytecode)
    {
      last_insn = get_last_insn ();
      if (!optimize
          && (GET_CODE (last_insn) == CODE_LABEL
              || (GET_CODE (last_insn) == NOTE
                  && prev_real_insn (last_insn) == 0)))
        emit_insn (gen_nop ());
    }
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  if (output_bytecode)
    {
      bc_expand_expr (exp);
      bc_emit_instruction (jumpP);
    }
  else
    {
      rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
      x = convert_memory_address (Pmode, x);
#endif

      emit_queue ();
      /* Be sure the function is executable.  */
      if (flag_check_memory_usage)
        emit_library_call (chkr_check_exec_libfunc, 1,
                           VOIDmode, 1, x, ptr_mode);

      do_pending_stack_adjust ();
      emit_indirect_jump (x);
    }
}
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  if (output_bytecode)
    {
      if (! DECL_RTL (label))
        DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
      if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
        error ("multiply defined label");
      return;
    }

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
        = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
                       &nonlocal_goto_stack_level,
                       PREV_INSN (tail_recursion_reentry));
    }
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  if (output_bytecode)
    {
      expand_goto_internal (label, label_rtx (label), NULL_RTX);
      return;
    }

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
         case the virtual stack vars register gets instantiated differently
         in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
                                      copy_rtx (p->nonlocal_goto_handler_slot),
                                      copy_rtx (p->nonlocal_goto_stack_level),
                                      label_ref));
      else
#endif
        {
          rtx addr;

          /* Restore frame pointer for containing function.
             This sets the actual hard register used for the frame pointer
             to the location of the function's incoming static chain info.
             The non-local goto handler will then adjust it to contain the
             proper value and reload the argument pointer, if needed.  */
          emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

          /* We have now loaded the frame pointer hardware register with
             the address that corresponds to the start of the virtual
             stack vars.  So replace virtual_stack_vars_rtx in all
             addresses we use with stack_pointer_rtx.  */

          /* Get addr of containing function's current nonlocal goto handler,
             which will do any cleanups and then jump to the label.  */
          addr = copy_rtx (p->nonlocal_goto_handler_slot);
          temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
                                           hard_frame_pointer_rtx));

          /* Restore the stack pointer.  Note this uses fp just restored.  */
          addr = p->nonlocal_goto_stack_level;
          if (addr)
            addr = replace_rtx (copy_rtx (addr),
                                virtual_stack_vars_rtx,
                                hard_frame_pointer_rtx);

          emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

          /* Put in the static chain register the nonlocal label address.  */
          emit_move_insn (static_chain_rtx, label_ref);
          /* USE of hard_frame_pointer_rtx added for consistency; not clear if
             really needed.  */
          emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
          emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
          emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
          emit_indirect_jump (temp);
        }
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  /* NOTICE!  If a bytecode instruction other than `jump' is needed,
     then the caller has to call bc_expand_goto_internal()
     directly.  This is rather an exceptional case, and there aren't
     that many places where this is necessary.  */
  if (output_bytecode)
    {
      bc_expand_goto_internal (jump, BYTECODE_BC_LABEL (label), body);
      return;
    }

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing insn-uids.)
         Then restore the outermost stack level within that block,
         and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
        {
          if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
            break;
          if (block->data.block.stack_level != 0)
            stack_level = block->data.block.stack_level;
          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
            }
        }

      if (stack_level)
        {
          /* Ensure stack adjust isn't done by emit_jump, as this
             would clobber the stack pointer.  This one should be
             deleted as dead by flow.  */
          clear_pending_stack_adjust ();
          do_pending_stack_adjust ();
          emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
        }

      if (body != 0 && DECL_TOO_LATE (body))
        error ("jump to `%s' invalidly jumps into binding contour",
               IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
         of at least one goto that has no fixup.  */
      if (body != 0)
        TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate a jump with OPCODE to the given bytecode LABEL which is
   found within BODY.  */

static void
bc_expand_goto_internal (opcode, label, body)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     tree body;
{
  struct nesting *block;
  int stack_level = -1;

  /* If the label is defined, adjust the stack as necessary.
     If it's not defined, we have to push the reference on the
     fixup list.  */

  if (label->defined)
    {
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing bytecode uids.)  Then restore the
         outermost stack level within that block.  */

      for (block = block_stack; block; block = block->next)
        {
          if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)
            break;
          if (block->data.block.bc_stack_level)
            stack_level = block->data.block.bc_stack_level;

          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
            }
        }

      /* Restore the stack level.  If we need to adjust the stack, we
         must do so after the jump, since the jump may depend on
         what's on the stack.  Thus, any stack-modifying conditional
         jumps (these are the only ones that rely on what's on the
         stack) go into the fixup list.  */

      if (stack_level >= 0
          && stack_depth != stack_level
          && opcode != jump)
        bc_expand_fixup (opcode, label, stack_level);
      else
        {
          if (stack_level >= 0)
            bc_adjust_stack (stack_depth - stack_level);

          if (body && DECL_BIT_FIELD (body))
            error ("jump to `%s' invalidly jumps into binding contour",
                   IDENTIFIER_POINTER (DECL_NAME (body)));

          /* Emit immediate jump */
          bc_emit_bytecode (opcode);
          bc_emit_bytecode_labelref (label);

#ifdef DEBUG_PRINT_CODE
          fputc ('\n', stderr);
#endif
        }
    }
  else
    /* Put goto in the fixup list */
    bc_expand_fixup (opcode, label, stack_level);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
          || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
           && (rtl_label == loop_stack->data.loop.start_label
               || rtl_label == loop_stack->data.loop.end_label
               || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
         If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
        next_block = next_block->all;

      if (next_block)
        return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
         which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
        if (block == next_block)
          next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
        || block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
        = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
         after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
         doing the stack adjust here isn't useful.  Doing it with our
         settings at that location isn't useful either.  Let's hope
         someone does it!  */
      if (last_insn == 0)
        do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
         NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
         this point.  The notes will encapsulate any and all fixup
         code which we might later insert at this point in the insn
         stream.  Also, the BLOCK node will be the parent (i.e. the
         `SUPERBLOCK') of any other BLOCK nodes which we might create
         later on when we are expanding the fixup code.  */

      {
        register rtx original_before_jump
          = last_insn ? last_insn : get_last_insn ();

        start_sequence ();
        pushlevel (0);
        fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
        last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
        fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now! */
        end_sequence ();
        emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
        = ((block->data.block.outer_cleanups
            || block->data.block.cleanups)
           ? tree_cons (NULL_TREE, block->data.block.cleanups,
                        block->data.block.outer_cleanups)
           : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
   Make the fixup restore the stack level to STACK_LEVEL.  */

static void
bc_expand_fixup (opcode, label, stack_level)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     int stack_level;
{
  struct goto_fixup *fixup
    = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));

  fixup->label = bc_get_bytecode_label ();
  fixup->bc_target = label;
  fixup->bc_stack_level = stack_level;
  fixup->bc_handled = FALSE;

  fixup->next = goto_fixup_chain;
  goto_fixup_chain = fixup;

  /* Insert a jump to the fixup code */
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (fixup->label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  if (output_bytecode)
    {
      /* ??? The second arg is the bc stack level, which is not the same
         as STACK_LEVEL.  I have no idea what should go here, so I'll
         just pass 0.  */
      bc_fixup_gotos (thisblock, 0, cleanup_list, first_insn, dont_jump_in);
      return;
    }

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev != 0)
            prev->next = f->next;
        }
      /* Has this fixup's target label been defined?
         If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
        {
          register rtx cleanup_insns;

          /* Get the first non-label after the label
             this goto jumps to.  If that's before this scope begins,
             we don't have a jump into the scope.  */
          rtx after_label = f->target_rtl;
          while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
            after_label = NEXT_INSN (after_label);

          /* If this fixup jumped into this contour from before the beginning
             of this contour, report an error.  */
          /* ??? Bug: this does not detect jumping in through intermediate
             blocks that have stack levels or cleanups.
             It detects only a problem with the innermost block
             around the label.  */
          if (f->target != 0
              && (dont_jump_in || stack_level || cleanup_list)
              /* If AFTER_LABEL is 0, it means the jump goes to the end
                 of the rtl, which means it jumps into this scope.  */
              && (after_label == 0
                  || INSN_UID (first_insn) < INSN_UID (after_label))
              && INSN_UID (first_insn) > INSN_UID (f->before_jump)
              && ! DECL_ERROR_ISSUED (f->target))
            {
              error_with_decl (f->target,
                               "label `%s' used before containing binding contour");
              /* Prevent multiple errors for one label.  */
              DECL_ERROR_ISSUED (f->target) = 1;
            }

          /* We will expand the cleanups into a sequence of their own and
             then later on we will attach this new sequence to the insn
             stream just ahead of the actual jump insn.  */

          start_sequence ();

          /* Temporarily restore the lexical context where we will
             logically be inserting the fixup code.  We do this for the
             sake of getting the debugging information right.  */

          pushlevel (0);
          set_block (f->context);

          /* Expand the cleanups for blocks this jump exits.  */
          if (f->cleanup_list_list)
            {
              tree lists;
              for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
                /* Marked elements correspond to blocks that have been closed.
                   Do their cleanups.  */
                if (TREE_ADDRESSABLE (lists)
                    && TREE_VALUE (lists) != 0)
                  {
                    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
                    /* Pop any pushes done in the cleanups,
                       in case function is about to return.  */
                    do_pending_stack_adjust ();
                  }
            }

          /* Restore stack level for the biggest contour that this
             jump jumps out of.  */
          if (f->stack_level)
            emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

          /* Finish up the sequence containing the insns which implement the
             necessary cleanups, and then attach that whole sequence to the
             insn stream just ahead of the actual jump insn.  Attaching it
             at that point insures that any cleanups which are in fact
             implicit C++ object destructions (which must be executed upon
             leaving the block) appear (to the debugger) to be taking place
             in an area of the generated code where the object(s) being
             destructed are still "in scope".  */

          cleanup_insns = get_insns ();
          poplevel (1, 0, 0);

          end_sequence ();
          emit_insns_after (cleanup_insns, f->before_jump);

          f->before_jump = 0;
        }
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
        && PREV_INSN (f->target_rtl) == 0
        /* Label has still not appeared.  If we are exiting a block with
           a stack level to restore, that started before the fixup,
           mark this stack level as needing restoration
           when the fixup is later finalized.  */
        && thisblock != 0
        /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
           means the label is undefined.  That's erroneous, but possible.  */
        && (thisblock->data.block.block_start_count
            <= f->block_start_count))
      {
        tree lists = f->cleanup_list_list;
        rtx cleanup_insns;

        for (; lists; lists = TREE_CHAIN (lists))
          /* If the following elt. corresponds to our containing block
             then the elt. must be for this block.  */
          if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
            {
              start_sequence ();
              pushlevel (0);
              set_block (f->context);
              expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
              cleanup_insns = get_insns ();
              poplevel (1, 0, 0);
              end_sequence ();
              if (cleanup_insns != 0)
                f->before_jump
                  = emit_insns_after (cleanup_insns, f->before_jump);

              f->cleanup_list_list = TREE_CHAIN (lists);
            }

        if (stack_level)
          f->stack_level = stack_level;
      }
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   Note: STACK_DEPTH is not altered.

   The arguments are currently not used in the bytecode compiler, but we may
   need them one day for languages other than C.

   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
static void
bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     int stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;
  int saved_stack_depth;

  /* F is the fixup we are considering; PREV is the previous one.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev)
            prev->next = f->next;
        }

      /* Emit code to restore the stack and continue */
      bc_emit_bytecode_labeldef (f->label);

      /* Save stack_depth across call, since bc_adjust_stack will alter
         the perceived stack depth via the instructions generated.  */

      if (f->bc_stack_level >= 0)
        {
          saved_stack_depth = stack_depth;
          bc_adjust_stack (stack_depth - f->bc_stack_level);
          stack_depth = saved_stack_depth;
        }

      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (f->bc_target);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
    }

  goto_fixup_chain = NULL;
}
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (output_bytecode)
    {
      error ("`asm' is invalid when generating bytecode");
      return;
    }

  if (flag_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
                      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
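/* For example (GNU C source form, for illustration), the statement

	asm volatile ("add %1,%2,%0" : "=r" (x) : "r" (y), "r" (z) : "cc");

   reaches this function with STRING holding the template, OUTPUTS a
   one-element list for X with constraint "=r", INPUTS a two-element list
   for Y and Z each with constraint "r", CLOBBERS naming "cc", and VOL
   nonzero.  */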
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;

  /* An ASM with no outputs needs to be treated as volatile.  */
  if (noutputs == 0)
    vol = 1;

  if (output_bytecode)
    {
      error ("`asm' is invalid when generating bytecode");
      return;
    }

  if (flag_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
        ++nclobbers;
      else if (i == -2)
        error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      tree val1;
      int j;
      int found_equal = 0;
      int found_plus = 0;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
        return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
         if it allows any register.  Be liberal on the latter test, since
         the worst that happens if we get it wrong is we issue an error
         message.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
        switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
          {
          case '+':
            /* Make sure we can specify the matching operand.  */
            if (i > 9)
              {
                error ("output operand constraint %d contains `+'", i);
                return;
              }

            /* Replace '+' with '='.  */
            TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] = '=';
            found_plus = 1;
            break;

          case '=':
            found_equal = 1;
            break;

          case '?':  case '!':  case '*':  case '%':  case '&':
          case 'V':  case 'm':  case 'o':  case '<':  case '>':
          case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
          case 's':  case 'i':  case 'n':
          case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
          case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
          case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
            break;

          case '0':  case '1':  case '2':  case '3':  case '4':
          case '5':  case '6':  case '7':  case '8':  case '9':
            error ("matching constraint not valid in output operand");
            break;

          case 'p':  case 'g':  case 'r':
          default:
            allows_reg = 1;
            break;
          }

      if (! found_equal && ! found_plus)
        {
          error ("output operand constraint lacks `='");
          return;
        }

      /* If an output operand is not a decl or indirect ref and our constraint
         allows a register, make a temporary to act as an intermediate.
         Make the asm insn write into that, then our caller will copy it to
         the real output operand.  Likewise for promoted variables.  */

      if (TREE_CODE (val) == INDIRECT_REF
          || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
              && ! (GET_CODE (DECL_RTL (val)) == REG
                    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
          || ! allows_reg
          || found_plus)
        {
          if (! allows_reg)
            mark_addressable (TREE_VALUE (tail));

          output_rtx[i]
            = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
                           EXPAND_MEMORY_USE_WO);

          if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
            error ("output number %d not directly addressable", i);
        }
      else
        {
          output_rtx[i] = assign_temp (type, 0, 0, 0);
          TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
        }

      if (found_plus)
        {
          inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
          inout_opnum[ninout++] = i;
        }
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }
  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
                  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
                  filename, line);
  MEM_VOLATILE_P (body) = vol;
  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn,
         because the ASM_INPUT would get VOIDmode
         and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
        return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
        {
          error ("hard register `%s' listed as input operand to `asm'",
                 TREE_STRING_POINTER (TREE_VALUE (tail)) );
          return;
        }

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
        switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
          {
          case '+':  case '=':
            error ("input operand constraint contains `%c'",
                   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
            return;

          case '?':  case '!':  case '*':  case '%':  case '&':
          case 'V':  case 'm':  case 'o':  case '<':  case '>':
          case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
          case 's':  case 'i':  case 'n':
          case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
          case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
          case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
            break;

            /* Whether or not a numeric constraint allows a register is
               decided by the matching constraint, and so there is no need
               to do anything special with them.  We must handle them in
               the default case, so that we don't unnecessarily force
               operands to memory.  */
          case '0':  case '1':  case '2':  case '3':  case '4':
          case '5':  case '6':  case '7':  case '8':  case '9':
            if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]
                >= '0' + noutputs)
              error ("matching constraint references invalid operand number");

            /* ... fall through ... */

          case 'p':  case 'g':  case 'r':
          default:
            allows_reg = 1;
            break;
          }

      if (! allows_reg)
        mark_addressable (TREE_VALUE (tail));

      XVECEXP (body, 3, i)      /* argvec */
        = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      if (CONSTANT_P (XVECEXP (body, 3, i))
          && ! general_operand (XVECEXP (body, 3, i),
                                TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)))))
        {
          if (allows_reg)
            XVECEXP (body, 3, i)
              = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                           XVECEXP (body, 3, i));
          else
            XVECEXP (body, 3, i)
              = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                                 XVECEXP (body, 3, i));
        }

      if (! allows_reg
          && (GET_CODE (XVECEXP (body, 3, i)) == REG
              || GET_CODE (XVECEXP (body, 3, i)) == SUBREG
              || GET_CODE (XVECEXP (body, 3, i)) == CONCAT))
        {
          tree type = TREE_TYPE (TREE_VALUE (tail));
          rtx memloc = assign_temp (type, 1, 1, 1);

          emit_move_insn (memloc, XVECEXP (body, 3, i));
          XVECEXP (body, 3, i) = memloc;
        }

      XVECEXP (body, 4, i)      /* constraints */
        = gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }
  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs - ninout; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      static char match[9+1][2]
        = {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
      int j = inout_opnum[i];

      XVECEXP (body, 3, ninputs - ninout + i)      /* argvec */
        = output_rtx[j];
      XVECEXP (body, 4, ninputs - ninout + i)      /* constraints */
        = gen_rtx (ASM_INPUT, inout_mode[j], match[j]);
    }
  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
                               ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */
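  /* For instance (an illustrative sketch), an asm with two outputs and
     one register clobber becomes

	(parallel [(set OUTPUT0 (asm_operands ... 0 ...))
		   (set OUTPUT1 (asm_operands ... 1 ...))
		   (clobber (reg ...))])

     where the ASM_OPERANDS rtxs share ARGVEC and CONSTRAINTS.  */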
  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
        {
          XVECEXP (body, 0, i)
            = gen_rtx (SET, VOIDmode,
                       output_rtx[i],
                       gen_rtx (ASM_OPERANDS, VOIDmode,
                                TREE_STRING_POINTER (string),
                                TREE_STRING_POINTER (TREE_PURPOSE (tail)),
                                i, argvec, constraints,
                                filename, line));
          MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
        }

      /* If there are no outputs (but there are some clobbers)
         store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
        XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
        {
          char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
          int j = decode_reg_name (regname);

          if (j < 0)
            {
              if (j == -3)      /* `cc', which is not a register */
                continue;

              if (j == -4)      /* `memory', don't cache memory across asm */
                {
                  XVECEXP (body, 0, i++)
                    = gen_rtx (CLOBBER, VOIDmode,
                               gen_rtx (MEM, BLKmode,
                                        gen_rtx (SCRATCH, VOIDmode, 0)));
                  continue;
                }

              /* Ignore unknown register, error already signalled.  */
              continue;
            }

          /* Use QImode since that's guaranteed to clobber just one reg.  */
          XVECEXP (body, 0, i++)
            = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
        }

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
1773 /* Generate RTL to evaluate the expression EXP
1774 and remember it in case this is the VALUE in a ({... VALUE; }) construct. */
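/* For example, in the GNU C statement expression

        x = ({ int y = foo (); y + 3; });

   the final expr-stmt `y + 3' supplies the value of the whole
   construct, so its type and RTL must be recorded here. */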
1776 void
1777 expand_expr_stmt (exp)
1778 tree exp;
1780 if (output_bytecode)
1782 int org_stack_depth = stack_depth;
1784 bc_expand_expr (exp);
1786 /* Restore stack depth */
1787 if (stack_depth < org_stack_depth)
1788 abort ();
1790 bc_emit_instruction (drop);
1792 last_expr_type = TREE_TYPE (exp);
1793 return;
1796 /* If -W, warn about statements with no side effects,
1797 except for an explicit cast to void (e.g. for assert()), and
1798 except inside a ({...}) where they may be useful. */
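/* Thus with -W, `x + 1;' alone draws the warning, while
   `(void) foo ();' and the expr-stmts inside a ({...}) do not. */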
1799 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1801 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1802 && !(TREE_CODE (exp) == CONVERT_EXPR
1803 && TREE_TYPE (exp) == void_type_node))
1804 warning_with_file_and_line (emit_filename, emit_lineno,
1805 "statement with no effect");
1806 else if (warn_unused)
1807 warn_if_unused_value (exp);
1810 /* If EXP is of function type and we are expanding statements for
1811 value, convert it to pointer-to-function. */
1812 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1813 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1815 last_expr_type = TREE_TYPE (exp);
1816 if (! flag_syntax_only)
1817 last_expr_value = expand_expr (exp,
1818 (expr_stmts_for_value
1819 ? NULL_RTX : const0_rtx),
1820 VOIDmode, 0);
1822 /* If all we do is reference a volatile value in memory,
1823 copy it to a register to be sure it is actually touched. */
1824 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1825 && TREE_THIS_VOLATILE (exp))
1827 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1829 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1830 copy_to_reg (last_expr_value);
1831 else
1833 rtx lab = gen_label_rtx ();
1835 /* Compare the value with itself to reference it. */
1836 emit_cmp_insn (last_expr_value, last_expr_value, EQ,
1837 expand_expr (TYPE_SIZE (last_expr_type),
1838 NULL_RTX, VOIDmode, 0),
1839 BLKmode, 0,
1840 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
1841 emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
1842 emit_label (lab);
1846 /* If this expression is part of a ({...}) and is in memory, we may have
1847 to preserve temporaries. */
1848 preserve_temp_slots (last_expr_value);
1850 /* Free any temporaries used to evaluate this expression. Any temporary
1851 used as a result of this expression will already have been preserved
1852 above. */
1853 free_temp_slots ();
1855 emit_queue ();
1858 /* Warn if EXP contains any computations whose results are not used.
1859 Return 1 if a warning is printed; 0 otherwise. */
1862 warn_if_unused_value (exp)
1863 tree exp;
1865 if (TREE_USED (exp))
1866 return 0;
1868 switch (TREE_CODE (exp))
1870 case PREINCREMENT_EXPR:
1871 case POSTINCREMENT_EXPR:
1872 case PREDECREMENT_EXPR:
1873 case POSTDECREMENT_EXPR:
1874 case MODIFY_EXPR:
1875 case INIT_EXPR:
1876 case TARGET_EXPR:
1877 case CALL_EXPR:
1878 case METHOD_CALL_EXPR:
1879 case RTL_EXPR:
1880 case WITH_CLEANUP_EXPR:
1881 case EXIT_EXPR:
1882 /* We don't warn about COND_EXPR because it may be a useful
1883 construct if either arm contains a side effect. */
1884 case COND_EXPR:
1885 return 0;
1887 case BIND_EXPR:
1888 /* For a binding, warn if no side effect within it. */
1889 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1891 case SAVE_EXPR:
1892 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1894 case TRUTH_ORIF_EXPR:
1895 case TRUTH_ANDIF_EXPR:
1896 /* In && or ||, warn if 2nd operand has no side effect. */
1897 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1899 case COMPOUND_EXPR:
1900 if (TREE_NO_UNUSED_WARNING (exp))
1901 return 0;
1902 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1903 return 1;
1904 /* Let people do `(foo (), 0)' without a warning. */
1905 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1906 return 0;
1907 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1909 case NOP_EXPR:
1910 case CONVERT_EXPR:
1911 case NON_LVALUE_EXPR:
1912 /* Don't warn about values cast to void. */
1913 if (TREE_TYPE (exp) == void_type_node)
1914 return 0;
1915 /* Don't warn about conversions not explicit in the user's program. */
1916 if (TREE_NO_UNUSED_WARNING (exp))
1917 return 0;
1918 /* Assignment to a cast usually results in a cast of a modify.
1919 Don't complain about that. There can be an arbitrary number of
1920 casts before the modify, so we must loop until we find the first
1921 non-cast expression and then test to see if that is a modify. */
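/* For example, an assignment written through a cast expands to
   NOP_EXPRs or CONVERT_EXPRs wrapped around a MODIFY_EXPR;
   stripping the casts and finding the MODIFY_EXPR, we stay
   silent. */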
1923 tree tem = TREE_OPERAND (exp, 0);
1925 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1926 tem = TREE_OPERAND (tem, 0);
1928 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1929 || TREE_CODE (tem) == CALL_EXPR)
1930 return 0;
1932 goto warn;
1934 case INDIRECT_REF:
1935 /* Don't warn about automatic dereferencing of references, since
1936 the user cannot control it. */
1937 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1938 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1939 /* ... fall through ... */
1941 default:
1942 /* Referencing a volatile value is a side effect, so don't warn. */
1943 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1944 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1945 && TREE_THIS_VOLATILE (exp))
1946 return 0;
1947 warn:
1948 warning_with_file_and_line (emit_filename, emit_lineno,
1949 "value computed is not used");
1950 return 1;
1954 /* Clear out the memory of the last expression evaluated. */
1956 void
1957 clear_last_expr ()
1959 last_expr_type = 0;
1962 /* Begin a statement which will return a value.
1963 Return the RTL_EXPR for this statement expr.
1964 The caller must save that value and pass it to expand_end_stmt_expr. */
1966 tree
1967 expand_start_stmt_expr ()
1969 int momentary;
1970 tree t;
1972 /* When generating bytecode, just note down the stack depth. */
1973 if (output_bytecode)
1974 return (build_int_2 (stack_depth, 0));
1976 /* Make the RTL_EXPR node temporary, not momentary,
1977 so that rtl_expr_chain doesn't become garbage. */
1978 momentary = suspend_momentary ();
1979 t = make_node (RTL_EXPR);
1980 resume_momentary (momentary);
1981 do_pending_stack_adjust ();
1982 start_sequence_for_rtl_expr (t);
1983 NO_DEFER_POP;
1984 expr_stmts_for_value++;
1985 return t;
1988 /* Restore the previous state at the end of a statement that returns a value.
1989 Returns a tree node representing the statement's value and the
1990 insns to compute the value.
1992 The nodes of that expression have been freed by now, so we cannot use them.
1993 But we don't want to do that anyway; the expression has already been
1994 evaluated and now we just want to use the value. So generate an RTL_EXPR
1995 with the proper type and RTL value.
1997 If the last substatement was not an expression,
1998 return something with type `void'. */
2000 tree
2001 expand_end_stmt_expr (t)
2002 tree t;
2004 if (output_bytecode)
2006 int i;
2007 tree t;
2010 /* At this point, all expressions have been evaluated in order.
2011 However, all expression values have been popped when evaluated,
2012 which means we have to recover the last expression value. This is
2013 the last value removed by means of a `drop' instruction. Instead
2014 of adding code to inhibit dropping the last expression value, it
2015 is here recovered by undoing the `drop'. Since `drop' is
2016 equivalent to `adjstackSI [1]', it can be undone with `adjstackSI
2017 [-1]'. */
2019 bc_adjust_stack (-1);
2021 if (!last_expr_type)
2022 last_expr_type = void_type_node;
2024 t = make_node (RTL_EXPR);
2025 TREE_TYPE (t) = last_expr_type;
2026 RTL_EXPR_RTL (t) = NULL;
2027 RTL_EXPR_SEQUENCE (t) = NULL;
2029 /* Don't consider deleting this expr or containing exprs at tree level. */
2030 TREE_THIS_VOLATILE (t) = 1;
2032 last_expr_type = 0;
2033 return t;
2036 OK_DEFER_POP;
2038 if (last_expr_type == 0)
2040 last_expr_type = void_type_node;
2041 last_expr_value = const0_rtx;
2043 else if (last_expr_value == 0)
2044 /* There are some cases where this can happen, such as when the
2045 statement is void type. */
2046 last_expr_value = const0_rtx;
2047 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2048 /* Remove any possible QUEUED. */
2049 last_expr_value = protect_from_queue (last_expr_value, 0);
2051 emit_queue ();
2053 TREE_TYPE (t) = last_expr_type;
2054 RTL_EXPR_RTL (t) = last_expr_value;
2055 RTL_EXPR_SEQUENCE (t) = get_insns ();
2057 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2059 end_sequence ();
2061 /* Don't consider deleting this expr or containing exprs at tree level. */
2062 TREE_SIDE_EFFECTS (t) = 1;
2063 /* Propagate volatility of the actual RTL expr. */
2064 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2066 last_expr_type = 0;
2067 expr_stmts_for_value--;
2069 return t;
2072 /* Generate RTL for the start of an if-then. COND is the expression
2073 whose truth should be tested.
2075 If EXITFLAG is nonzero, this conditional is visible to
2076 `exit_something'. */
2078 void
2079 expand_start_cond (cond, exitflag)
2080 tree cond;
2081 int exitflag;
2083 struct nesting *thiscond = ALLOC_NESTING ();
2085 /* Make an entry on cond_stack for the cond we are entering. */
2087 thiscond->next = cond_stack;
2088 thiscond->all = nesting_stack;
2089 thiscond->depth = ++nesting_depth;
2090 thiscond->data.cond.next_label = gen_label_rtx ();
2091 /* Before we encounter an `else', we don't need a separate exit label
2092 unless there are supposed to be exit statements
2093 to exit this conditional. */
2094 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2095 thiscond->data.cond.endif_label = thiscond->exit_label;
2096 cond_stack = thiscond;
2097 nesting_stack = thiscond;
2099 if (output_bytecode)
2100 bc_expand_start_cond (cond, exitflag);
2101 else
2102 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2105 /* Generate RTL between the then-clause and the elseif-clause
2106 of an if-then-elseif-.... */
2108 void
2109 expand_start_elseif (cond)
2110 tree cond;
2112 if (cond_stack->data.cond.endif_label == 0)
2113 cond_stack->data.cond.endif_label = gen_label_rtx ();
2114 emit_jump (cond_stack->data.cond.endif_label);
2115 emit_label (cond_stack->data.cond.next_label);
2116 cond_stack->data.cond.next_label = gen_label_rtx ();
2117 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2120 /* Generate RTL between the then-clause and the else-clause
2121 of an if-then-else. */
2123 void
2124 expand_start_else ()
2126 if (cond_stack->data.cond.endif_label == 0)
2127 cond_stack->data.cond.endif_label = gen_label_rtx ();
2129 if (output_bytecode)
2131 bc_expand_start_else ();
2132 return;
2135 emit_jump (cond_stack->data.cond.endif_label);
2136 emit_label (cond_stack->data.cond.next_label);
2137 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2140 /* After calling expand_start_else, turn this "else" into an "else if"
2141 by providing another condition. */
2143 void
2144 expand_elseif (cond)
2145 tree cond;
2147 cond_stack->data.cond.next_label = gen_label_rtx ();
2148 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2151 /* Generate RTL for the end of an if-then.
2152 Pop the record for it off of cond_stack. */
2154 void
2155 expand_end_cond ()
2157 struct nesting *thiscond = cond_stack;
2159 if (output_bytecode)
2160 bc_expand_end_cond ();
2161 else
2163 do_pending_stack_adjust ();
2164 if (thiscond->data.cond.next_label)
2165 emit_label (thiscond->data.cond.next_label);
2166 if (thiscond->data.cond.endif_label)
2167 emit_label (thiscond->data.cond.endif_label);
2170 POPSTACK (cond_stack);
2171 last_expr_type = 0;
2175 /* Generate code for the start of an if-then. COND is the expression
2176 whose truth is to be tested; if EXITFLAG is nonzero this conditional
2177 is to be visible to exit_something. It is assumed that the caller
2178 has pushed the previous context on the cond stack. */
2180 static void
2181 bc_expand_start_cond (cond, exitflag)
2182 tree cond;
2183 int exitflag;
2185 struct nesting *thiscond = cond_stack;
2187 thiscond->data.case_stmt.nominal_type = cond;
2188 if (! exitflag)
2189 thiscond->exit_label = gen_label_rtx ();
2190 bc_expand_expr (cond);
2191 bc_emit_bytecode (xjumpifnot);
2192 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2194 #ifdef DEBUG_PRINT_CODE
2195 fputc ('\n', stderr);
2196 #endif
2199 /* Generate the label for the end of an if with
2200 no else-clause. */
2202 static void
2203 bc_expand_end_cond ()
2205 struct nesting *thiscond = cond_stack;
2207 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
2210 /* Generate code for the start of the else-clause of
2211 an if-then-else. */
2213 static void
2214 bc_expand_start_else ()
2216 struct nesting *thiscond = cond_stack;
2218 thiscond->data.cond.endif_label = thiscond->exit_label;
2219 thiscond->exit_label = gen_label_rtx ();
2220 bc_emit_bytecode (jump);
2221 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2223 #ifdef DEBUG_PRINT_CODE
2224 fputc ('\n', stderr);
2225 #endif
2227 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
2230 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2231 loop should be exited by `exit_something'. This is a loop for which
2232 `expand_continue' will jump to the top of the loop.
2234 Make an entry on loop_stack to record the labels associated with
2235 this loop. */
2237 struct nesting *
2238 expand_start_loop (exit_flag)
2239 int exit_flag;
2241 register struct nesting *thisloop = ALLOC_NESTING ();
2243 /* Make an entry on loop_stack for the loop we are entering. */
2245 thisloop->next = loop_stack;
2246 thisloop->all = nesting_stack;
2247 thisloop->depth = ++nesting_depth;
2248 thisloop->data.loop.start_label = gen_label_rtx ();
2249 thisloop->data.loop.end_label = gen_label_rtx ();
2250 thisloop->data.loop.alt_end_label = 0;
2251 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2252 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2253 loop_stack = thisloop;
2254 nesting_stack = thisloop;
2256 if (output_bytecode)
2258 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2259 return thisloop;
2262 do_pending_stack_adjust ();
2263 emit_queue ();
2264 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2265 emit_label (thisloop->data.loop.start_label);
2267 return thisloop;
2270 /* Like expand_start_loop but for a loop where the continuation point
2271 (for expand_continue_loop) will be specified explicitly. */
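/* A C `for' statement is the typical client: in

        for (init; cond; incr) body

   `continue' must reach INCR rather than the top of the loop, so the
   front end marks the continue-point just before the increment code
   with expand_loop_continue_here. */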
2273 struct nesting *
2274 expand_start_loop_continue_elsewhere (exit_flag)
2275 int exit_flag;
2277 struct nesting *thisloop = expand_start_loop (exit_flag);
2278 loop_stack->data.loop.continue_label = gen_label_rtx ();
2279 return thisloop;
2282 /* Specify the continuation point for a loop started with
2283 expand_start_loop_continue_elsewhere.
2284 Use this at the point in the code to which a continue statement
2285 should jump. */
2287 void
2288 expand_loop_continue_here ()
2290 if (output_bytecode)
2292 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
2293 return;
2295 do_pending_stack_adjust ();
2296 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2297 emit_label (loop_stack->data.loop.continue_label);
2300 /* End a loop. */
2302 static void
2303 bc_expand_end_loop ()
2305 struct nesting *thisloop = loop_stack;
2307 bc_emit_bytecode (jump);
2308 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2310 #ifdef DEBUG_PRINT_CODE
2311 fputc ('\n', stderr);
2312 #endif
2314 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
2315 POPSTACK (loop_stack);
2316 last_expr_type = 0;
2320 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2321 Pop the block off of loop_stack. */
2323 void
2324 expand_end_loop ()
2326 register rtx insn;
2327 register rtx start_label;
2328 rtx last_test_insn = 0;
2329 int num_insns = 0;
2331 if (output_bytecode)
2333 bc_expand_end_loop ();
2334 return;
2337 insn = get_last_insn ();
2338 start_label = loop_stack->data.loop.start_label;
2340 /* Mark the continue-point at the top of the loop if none elsewhere. */
2341 if (start_label == loop_stack->data.loop.continue_label)
2342 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2344 do_pending_stack_adjust ();
2346 /* If optimizing, perhaps reorder the loop. If the loop
2347 starts with a conditional exit, roll that to the end
2348 where it will optimize together with the jump back.
2350 We look for the last conditional branch to the exit that we encounter
2351 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2352 branch to the exit first, use it.
2354 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2355 because moving them is not valid. */
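/* For example, a loop emitted as

        top:  if (! cond) goto exit;
              body
              goto top;
        exit:

   is, in effect, rotated into

              goto test;
        top:  body
        test: if (cond) goto top;
        exit:

   so the conditional exit and the jump back can be optimized
   together. */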
2357 if (optimize
2359 ! (GET_CODE (insn) == JUMP_INSN
2360 && GET_CODE (PATTERN (insn)) == SET
2361 && SET_DEST (PATTERN (insn)) == pc_rtx
2362 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2364 /* Scan insns from the top of the loop looking for a qualified
2365 conditional exit. */
2366 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2367 insn = NEXT_INSN (insn))
2369 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2370 break;
2372 if (GET_CODE (insn) == NOTE
2373 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2374 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2375 break;
2377 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2378 num_insns++;
2380 if (last_test_insn && num_insns > 30)
2381 break;
2383 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2384 && SET_DEST (PATTERN (insn)) == pc_rtx
2385 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2386 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2387 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2388 == loop_stack->data.loop.end_label)
2389 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2390 == loop_stack->data.loop.alt_end_label)))
2391 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2392 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2393 == loop_stack->data.loop.end_label)
2394 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2395 == loop_stack->data.loop.alt_end_label)))))
2396 last_test_insn = insn;
2398 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2399 && GET_CODE (PATTERN (insn)) == SET
2400 && SET_DEST (PATTERN (insn)) == pc_rtx
2401 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2402 && ((XEXP (SET_SRC (PATTERN (insn)), 0)
2403 == loop_stack->data.loop.end_label)
2404 || (XEXP (SET_SRC (PATTERN (insn)), 0)
2405 == loop_stack->data.loop.alt_end_label)))
2406 /* Include BARRIER. */
2407 last_test_insn = NEXT_INSN (insn);
2410 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2412 /* We found one. Move everything from there up
2413 to the end of the loop, and add a jump into the loop
2414 that transfers to the moved insns. */
2415 register rtx newstart_label = gen_label_rtx ();
2416 register rtx start_move = start_label;
2418 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2419 then we want to move this note also. */
2420 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2421 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2422 == NOTE_INSN_LOOP_CONT))
2423 start_move = PREV_INSN (start_move);
2425 emit_label_after (newstart_label, PREV_INSN (start_move));
2426 reorder_insns (start_move, last_test_insn, get_last_insn ());
2427 emit_jump_insn_after (gen_jump (start_label),
2428 PREV_INSN (newstart_label));
2429 emit_barrier_after (PREV_INSN (newstart_label));
2430 start_label = newstart_label;
2434 emit_jump (start_label);
2435 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2436 emit_label (loop_stack->data.loop.end_label);
2438 POPSTACK (loop_stack);
2440 last_expr_type = 0;
2443 /* Generate a jump to the current loop's continue-point.
2444 This is usually the top of the loop, but may be specified
2445 explicitly elsewhere. If not currently inside a loop,
2446 return 0 and do nothing; caller will print an error message. */
2449 expand_continue_loop (whichloop)
2450 struct nesting *whichloop;
2452 last_expr_type = 0;
2453 if (whichloop == 0)
2454 whichloop = loop_stack;
2455 if (whichloop == 0)
2456 return 0;
2457 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2458 NULL_RTX);
2459 return 1;
2462 /* Generate a jump to exit the current loop. If not currently inside a loop,
2463 return 0 and do nothing; caller will print an error message. */
2466 expand_exit_loop (whichloop)
2467 struct nesting *whichloop;
2469 last_expr_type = 0;
2470 if (whichloop == 0)
2471 whichloop = loop_stack;
2472 if (whichloop == 0)
2473 return 0;
2474 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2475 return 1;
2478 /* Generate a conditional jump to exit the current loop if COND
2479 evaluates to zero. If not currently inside a loop,
2480 return 0 and do nothing; caller will print an error message. */
2483 expand_exit_loop_if_false (whichloop, cond)
2484 struct nesting *whichloop;
2485 tree cond;
2487 last_expr_type = 0;
2488 if (whichloop == 0)
2489 whichloop = loop_stack;
2490 if (whichloop == 0)
2491 return 0;
2492 if (output_bytecode)
2494 bc_expand_expr (cond);
2495 bc_expand_goto_internal (xjumpifnot,
2496 BYTECODE_BC_LABEL (whichloop->exit_label),
2497 NULL_TREE);
2499 else
2501 /* In order to handle fixups, we actually create a conditional jump
2502 around an unconditional branch to exit the loop. If fixups are
2503 necessary, they go before the unconditional branch. */
2505 rtx label = gen_label_rtx ();
2506 rtx last_insn;
2508 do_jump (cond, NULL_RTX, label);
2509 last_insn = get_last_insn ();
2510 if (GET_CODE (last_insn) == CODE_LABEL)
2511 whichloop->data.loop.alt_end_label = last_insn;
2512 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2513 NULL_RTX);
2514 emit_label (label);
2517 return 1;
2520 /* Return non-zero if we should preserve sub-expressions as separate
2521 pseudos. We never do so if we aren't optimizing. We always do so
2522 if -fexpensive-optimizations.
2524 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2525 the loop may still be a small one. */
2528 preserve_subexpressions_p ()
2530 rtx insn;
2532 if (flag_expensive_optimizations)
2533 return 1;
2535 if (optimize == 0 || loop_stack == 0)
2536 return 0;
2538 insn = get_last_insn_anywhere ();
2540 return (insn
2541 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2542 < n_non_fixed_regs * 3));
2546 /* Generate a jump to exit the current loop, conditional, binding contour
2547 or case statement. Not all such constructs are visible to this function,
2548 only those started with EXIT_FLAG nonzero. Individual languages use
2549 the EXIT_FLAG parameter to control which kinds of constructs you can
2550 exit this way.
2552 If not currently inside anything that can be exited,
2553 return 0 and do nothing; caller will print an error message. */
2556 expand_exit_something ()
2558 struct nesting *n;
2559 last_expr_type = 0;
2560 for (n = nesting_stack; n; n = n->all)
2561 if (n->exit_label != 0)
2563 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2564 return 1;
2567 return 0;
2570 /* Generate RTL to return from the current function, with no value.
2571 (That is, we do not do anything about returning any value.) */
2573 void
2574 expand_null_return ()
2576 struct nesting *block = block_stack;
2577 rtx last_insn = 0;
2579 if (output_bytecode)
2581 bc_emit_instruction (ret);
2582 return;
2585 /* Does any pending block have cleanups? */
2587 while (block && block->data.block.cleanups == 0)
2588 block = block->next;
2590 /* If yes, use a goto to return, since that runs cleanups. */
2592 expand_null_return_1 (last_insn, block != 0);
2595 /* Generate RTL to return from the current function, with value VAL. */
2597 static void
2598 expand_value_return (val)
2599 rtx val;
2601 struct nesting *block = block_stack;
2602 rtx last_insn = get_last_insn ();
2603 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2605 /* Copy the value to the return location
2606 unless it's already there. */
2608 if (return_reg != val)
2610 #ifdef PROMOTE_FUNCTION_RETURN
2611 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2612 int unsignedp = TREE_UNSIGNED (type);
2613 enum machine_mode mode
2614 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2615 &unsignedp, 1);
2617 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2618 convert_move (return_reg, val, unsignedp);
2619 else
2620 #endif
2621 emit_move_insn (return_reg, val);
2623 if (GET_CODE (return_reg) == REG
2624 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2625 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2626 /* Handle calls that return values in multiple non-contiguous locations.
2627 The Irix 6 ABI has examples of this. */
2628 else if (GET_CODE (return_reg) == PARALLEL)
2630 int i;
2632 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2634 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2636 if (GET_CODE (x) == REG
2637 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2638 emit_insn (gen_rtx (USE, VOIDmode, x));
2642 /* Does any pending block have cleanups? */
2644 while (block && block->data.block.cleanups == 0)
2645 block = block->next;
2647 /* If yes, use a goto to return, since that runs cleanups.
2648 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2650 expand_null_return_1 (last_insn, block != 0);
2653 /* Output a return with no value. If LAST_INSN is nonzero,
2654 pretend that the return takes place after LAST_INSN.
2655 If USE_GOTO is nonzero then don't use a return instruction;
2656 go to the return label instead. This causes any cleanups
2657 of pending blocks to be executed normally. */
2659 static void
2660 expand_null_return_1 (last_insn, use_goto)
2661 rtx last_insn;
2662 int use_goto;
2664 rtx end_label = cleanup_label ? cleanup_label : return_label;
2666 clear_pending_stack_adjust ();
2667 do_pending_stack_adjust ();
2668 last_expr_type = 0;
2670 /* PCC-struct return always uses an epilogue. */
2671 if (current_function_returns_pcc_struct || use_goto)
2673 if (end_label == 0)
2674 end_label = return_label = gen_label_rtx ();
2675 expand_goto_internal (NULL_TREE, end_label, last_insn);
2676 return;
2679 /* Otherwise output a simple return-insn if one is available,
2680 unless it won't do the job. */
2681 #ifdef HAVE_return
2682 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2684 emit_jump_insn (gen_return ());
2685 emit_barrier ();
2686 return;
2688 #endif
2690 /* Otherwise jump to the epilogue. */
2691 expand_goto_internal (NULL_TREE, end_label, last_insn);
2694 /* Generate RTL to evaluate the expression RETVAL and return it
2695 from the current function. */
2697 void
2698 expand_return (retval)
2699 tree retval;
2701 /* If there are any cleanups to be performed, then they will
2702 be inserted following LAST_INSN. It is desirable
2703 that the last_insn, for such purposes, should be the
2704 last insn before computing the return value. Otherwise, cleanups
2705 which call functions can clobber the return value. */
2706 /* ??? rms: I think that is erroneous, because in C++ it would
2707 run destructors on variables that might be used in the subsequent
2708 computation of the return value. */
2709 rtx last_insn = 0;
2710 register rtx val = 0;
2711 register rtx op0;
2712 tree retval_rhs;
2713 int cleanups;
2714 struct nesting *block;
2716 /* Bytecode returns are quite simple: just leave the result on the
2717 arithmetic stack. */
2718 if (output_bytecode)
2720 bc_expand_expr (retval);
2721 bc_emit_instruction (ret);
2722 return;
2725 /* If function wants no value, give it none. */
2726 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2728 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2729 emit_queue ();
2730 expand_null_return ();
2731 return;
2734 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2735 /* This is not sufficient. We also need to watch for cleanups of the
2736 expression we are about to expand. Unfortunately, we cannot know
2737 if it has cleanups until we expand it, and we want to change how we
2738 expand it depending upon if we need cleanups. We can't win. */
2739 #if 0
2740 cleanups = any_pending_cleanups (1);
2741 #else
2742 cleanups = 1;
2743 #endif
2745 if (TREE_CODE (retval) == RESULT_DECL)
2746 retval_rhs = retval;
2747 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2748 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2749 retval_rhs = TREE_OPERAND (retval, 1);
2750 else if (TREE_TYPE (retval) == void_type_node)
2751 /* Recognize tail-recursive call to void function. */
2752 retval_rhs = retval;
2753 else
2754 retval_rhs = NULL_TREE;
2756 /* Only use `last_insn' if there are cleanups which must be run. */
2757 if (cleanups || cleanup_label != 0)
2758 last_insn = get_last_insn ();
2760 /* Distribute return down conditional expr if either of the sides
2761 may involve tail recursion (see test below). This increases the number
2762 of tail recursions we see. Don't do this always, since it can produce
2763 sub-optimal code in some cases, and we distribute assignments into
2764 conditional expressions only when it would help. */
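/* For example,

        return cond ? f (x) : g (x);

   is expanded as if written

        if (cond) return f (x); else return g (x);

   so that each arm may be recognized as a tail call below. */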
2766 if (optimize && retval_rhs != 0
2767 && frame_offset == 0
2768 && TREE_CODE (retval_rhs) == COND_EXPR
2769 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2770 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2772 rtx label = gen_label_rtx ();
2773 tree expr;
2775 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2776 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2777 DECL_RESULT (current_function_decl),
2778 TREE_OPERAND (retval_rhs, 1));
2779 TREE_SIDE_EFFECTS (expr) = 1;
2780 expand_return (expr);
2781 emit_label (label);
2783 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2784 DECL_RESULT (current_function_decl),
2785 TREE_OPERAND (retval_rhs, 2));
2786 TREE_SIDE_EFFECTS (expr) = 1;
2787 expand_return (expr);
2788 return;
2791 /* For tail-recursive call to current function,
2792 just jump back to the beginning.
2793 It's unsafe if any auto variable in this function
2794 has its address taken; for simplicity,
2795 require stack frame to be empty. */
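/* For instance, given

        int sum (n, acc) int n, acc;
        { return n == 0 ? acc : sum (n - 1, acc + n); }

   the recursive call is compiled as stores into N and ACC followed
   by a jump back to tail_recursion_label at the top of the
   function. */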
2796 if (optimize && retval_rhs != 0
2797 && frame_offset == 0
2798 && TREE_CODE (retval_rhs) == CALL_EXPR
2799 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2800 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2801 /* Finish checking validity, and if valid emit code
2802 to set the argument variables for the new call. */
2803 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2804 DECL_ARGUMENTS (current_function_decl)))
2806 if (tail_recursion_label == 0)
2808 tail_recursion_label = gen_label_rtx ();
2809 emit_label_after (tail_recursion_label,
2810 tail_recursion_reentry);
2812 emit_queue ();
2813 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2814 emit_barrier ();
2815 return;
2817 #ifdef HAVE_return
2818 /* This optimization is safe if there are local cleanups
2819 because expand_null_return takes care of them.
2820 ??? I think it should also be safe when there is a cleanup label,
2821 because expand_null_return takes care of them, too.
2822 Any reason why not? */
2823 if (HAVE_return && cleanup_label == 0
2824 && ! current_function_returns_pcc_struct
2825 && BRANCH_COST <= 1)
2827 /* If this is return x == y; then generate
2828 if (x == y) return 1; else return 0;
2829 if we can do it with explicit return insns and branches are cheap,
2830 but not if we have the corresponding scc insn. */
2831 int has_scc = 0;
2832 if (retval_rhs)
2833 switch (TREE_CODE (retval_rhs))
2835 case EQ_EXPR:
2836 #ifdef HAVE_seq
2837 has_scc = HAVE_seq;
2838 #endif
2839 case NE_EXPR:
2840 #ifdef HAVE_sne
2841 has_scc = HAVE_sne;
2842 #endif
2843 case GT_EXPR:
2844 #ifdef HAVE_sgt
2845 has_scc = HAVE_sgt;
2846 #endif
2847 case GE_EXPR:
2848 #ifdef HAVE_sge
2849 has_scc = HAVE_sge;
2850 #endif
2851 case LT_EXPR:
2852 #ifdef HAVE_slt
2853 has_scc = HAVE_slt;
2854 #endif
2855 case LE_EXPR:
2856 #ifdef HAVE_sle
2857 has_scc = HAVE_sle;
2858 #endif
2859 case TRUTH_ANDIF_EXPR:
2860 case TRUTH_ORIF_EXPR:
2861 case TRUTH_AND_EXPR:
2862 case TRUTH_OR_EXPR:
2863 case TRUTH_NOT_EXPR:
2864 case TRUTH_XOR_EXPR:
2865 if (! has_scc)
2867 op0 = gen_label_rtx ();
2868 jumpifnot (retval_rhs, op0);
2869 expand_value_return (const1_rtx);
2870 emit_label (op0);
2871 expand_value_return (const0_rtx);
2872 return;
2876 #endif /* HAVE_return */
2878 /* If the result is an aggregate that is being returned in one (or more)
2879 registers, load the registers here. The compiler currently can't handle
2880 copying a BLKmode value into registers. We could put this code in a
2881 more general area (for use by everyone instead of just function
2882 call/return), but until this feature is generally usable it is kept here
2883 (and in expand_call). The value must go into a pseudo in case there
2884 are cleanups that will clobber the real return register. */
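/* This happens e.g. for

        struct tiny { char c[3]; };
        struct tiny f () { ... }

   on a machine whose ABI returns such a structure in a register:
   the value has BLKmode, yet must end up in (part of) a hard
   register. */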
2886 if (retval_rhs != 0
2887 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2888 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2890 int i, bitpos, xbitpos;
2891 int big_endian_correction = 0;
2892 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2893 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2894 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),BITS_PER_WORD);
2895 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2896 rtx result_reg, src, dst;
2897 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2898 enum machine_mode tmpmode, result_reg_mode;
2900 /* Structures whose size is not a multiple of a word are aligned
2901 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2902 machine, this means we must skip the empty high order bytes when
2903 calculating the bit offset. */
2904 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2905 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2906 * BITS_PER_UNIT));
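/* E.g. with BITS_PER_WORD == 32 and a 6-byte structure,
   bytes % UNITS_PER_WORD == 2, so big_endian_correction
   == 32 - 2 * 8 == 16, and the destination bit offset below starts
   at 16 instead of 0. */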
2908 /* Copy the structure BITSIZE bits at a time. */
2909 for (bitpos = 0, xbitpos = big_endian_correction;
2910 bitpos < bytes * BITS_PER_UNIT;
2911 bitpos += bitsize, xbitpos += bitsize)
2913 /* We need a new destination pseudo each time xbitpos is
2914 on a word boundary and when xbitpos == big_endian_correction
2915 (the first time through). */
2916 if (xbitpos % BITS_PER_WORD == 0
2917 || xbitpos == big_endian_correction)
2919 /* Generate an appropriate register. */
2920 dst = gen_reg_rtx (word_mode);
2921 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2923 /* Clobber the destination before we move anything into it. */
2924 emit_insn (gen_rtx (CLOBBER, VOIDmode, dst));
2927 /* We need a new source operand each time bitpos is on a word
2928 boundary. */
2929 if (bitpos % BITS_PER_WORD == 0)
2930 src = operand_subword_force (result_val,
2931 bitpos / BITS_PER_WORD,
2932 BLKmode);
2934 /* Use bitpos for the source extraction (left justified) and
2935 xbitpos for the destination store (right justified). */
2936 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2937 extract_bit_field (src, bitsize,
2938 bitpos % BITS_PER_WORD, 1,
2939 NULL_RTX, word_mode,
2940 word_mode,
2941 bitsize / BITS_PER_UNIT,
2942 BITS_PER_WORD),
2943 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2946 /* Find the smallest integer mode large enough to hold the
2947 entire structure and use that mode instead of BLKmode
2948 on the USE insn for the return register. */
2949 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2950 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2951 tmpmode != MAX_MACHINE_MODE;
2952 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2954 /* Have we found a large enough mode? */
2955 if (GET_MODE_SIZE (tmpmode) >= bytes)
2956 break;
2959 /* No suitable mode found. */
2960 if (tmpmode == MAX_MACHINE_MODE)
2961 abort ();
2963 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2965 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2966 result_reg_mode = word_mode;
2967 else
2968 result_reg_mode = tmpmode;
2969 result_reg = gen_reg_rtx (result_reg_mode);
2971 emit_queue ();
2972 for (i = 0; i < n_regs; i++)
2973 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2974 result_pseudos[i]);
2976 if (tmpmode != result_reg_mode)
2977 result_reg = gen_lowpart (tmpmode, result_reg);
2979 expand_value_return (result_reg);
2981 else if (cleanups
2982 && retval_rhs != 0
2983 && TREE_TYPE (retval_rhs) != void_type_node
2984 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2986 /* Calculate the return value into a pseudo reg. */
2987 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
2988 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2989 val = force_not_mem (val);
2990 emit_queue ();
2991 /* Return the calculated value, doing cleanups first. */
2992 expand_value_return (val);
2994 else
2996 /* No cleanups or no hard reg used;
2997 calculate value into hard return reg. */
2998 expand_expr (retval, const0_rtx, VOIDmode, 0);
2999 emit_queue ();
3000 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
3004 /* Return 1 if the end of the generated RTX is not a barrier.
3005 This means code already compiled can drop through. */
3008 drop_through_at_end_p ()
3010 rtx insn = get_last_insn ();
3011 while (insn && GET_CODE (insn) == NOTE)
3012 insn = PREV_INSN (insn);
3013 return insn && GET_CODE (insn) != BARRIER;
3016 /* Emit code to alter this function's formal parms for a tail-recursive call.
3017 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3018 FORMALS is the chain of decls of formals.
3019 Return 1 if this can be done;
3020 otherwise return 0 and do not emit any code. */
3022 static int
3023 tail_recursion_args (actuals, formals)
3024 tree actuals, formals;
3026 register tree a = actuals, f = formals;
3027 register int i;
3028 register rtx *argvec;
3030 /* Check that number and types of actuals are compatible
3031 with the formals. This is not always true in valid C code.
3032 Also check that no formal needs to be addressable
3033 and that all formals are scalars. */
3035 /* Also count the args. */
3037 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3039 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3040 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3041 return 0;
3042 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3043 return 0;
3045 if (a != 0 || f != 0)
3046 return 0;
3048 /* Compute all the actuals. */
3050 argvec = (rtx *) alloca (i * sizeof (rtx));
3052 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3053 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3055 /* Find which actual values refer to current values of previous formals.
3056 Copy each of them now, before any formal is changed. */
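/* E.g. for the swap-like call f (y, x) at the end of f (x, y),
   the actual `x' still refers to formal 0, which the first store
   below would overwrite, so its value is copied to a fresh
   register first. */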
3058 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3060 int copy = 0;
3061 register int j;
3062 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3063 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3064 { copy = 1; break; }
3065 if (copy)
3066 argvec[i] = copy_to_reg (argvec[i]);
3069 /* Store the values of the actuals into the formals. */
3071 for (f = formals, a = actuals, i = 0; f;
3072 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3074 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3075 emit_move_insn (DECL_RTL (f), argvec[i]);
3076 else
3077 convert_move (DECL_RTL (f), argvec[i],
3078 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3081 free_temp_slots ();
3082 return 1;
3085 /* Generate the RTL code for entering a binding contour.
3086 The variables are declared one by one, by calls to `expand_decl'.
3088 EXIT_FLAG is nonzero if this construct should be visible to
3089 `exit_something'. */
3091 void
3092 expand_start_bindings (exit_flag)
3093 int exit_flag;
3095 struct nesting *thisblock = ALLOC_NESTING ();
3096 rtx note = output_bytecode ? 0 : emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3098 /* Make an entry on block_stack for the block we are entering. */
3100 thisblock->next = block_stack;
3101 thisblock->all = nesting_stack;
3102 thisblock->depth = ++nesting_depth;
3103 thisblock->data.block.stack_level = 0;
3104 thisblock->data.block.cleanups = 0;
3105 thisblock->data.block.function_call_count = 0;
3106 thisblock->data.block.exception_region = 0;
3107 thisblock->data.block.target_temp_slot_level = target_temp_slot_level;
3109 thisblock->data.block.conditional_code = 0;
3110 thisblock->data.block.last_unconditional_cleanup = note;
3111 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3113 if (block_stack
3114 && !(block_stack->data.block.cleanups == NULL_TREE
3115 && block_stack->data.block.outer_cleanups == NULL_TREE))
3116 thisblock->data.block.outer_cleanups
3117 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3118 block_stack->data.block.outer_cleanups);
3119 else
3120 thisblock->data.block.outer_cleanups = 0;
3121 thisblock->data.block.label_chain = 0;
3122 thisblock->data.block.innermost_stack_block = stack_block_stack;
3123 thisblock->data.block.first_insn = note;
3124 thisblock->data.block.block_start_count = ++block_start_count;
3125 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3126 block_stack = thisblock;
3127 nesting_stack = thisblock;
3129 if (!output_bytecode)
3131 /* Make a new level for allocating stack slots. */
3132 push_temp_slots ();
3136 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3137 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3138 expand_expr is made. After we end the region, we know that all
3139 space for all temporaries that were created by TARGET_EXPRs will be
3140 destroyed and their space freed for reuse. */
3142 void
3143 expand_start_target_temps ()
3145 /* This is so that even if the result is preserved, the space
3146 allocated will be freed, as we know that it is no longer in use. */
3147 push_temp_slots ();
3149 /* Start a new binding layer that will keep track of all cleanup
3150 actions to be performed. */
3151 expand_start_bindings (0);
3153 target_temp_slot_level = temp_slot_level;
3156 void
3157 expand_end_target_temps ()
3159 expand_end_bindings (NULL_TREE, 0, 0);
3161 /* This is so that even if the result is preserved, the space
3162 allocated will be freed, as we know that it is no longer in use. */
3163 pop_temp_slots ();
3166 /* Mark top block of block_stack as an implicit binding for an
3167 exception region. This is used to prevent infinite recursion when
3168 ending a binding with expand_end_bindings. It is only ever called
3169 by expand_eh_region_start, as that is the only way to create a
3170 block stack for an exception region. */
3172 void
3173 mark_block_as_eh_region ()
3175 block_stack->data.block.exception_region = 1;
3176 if (block_stack->next
3177 && block_stack->next->data.block.conditional_code)
3179 block_stack->data.block.conditional_code
3180 = block_stack->next->data.block.conditional_code;
3181 block_stack->data.block.last_unconditional_cleanup
3182 = block_stack->next->data.block.last_unconditional_cleanup;
3183 block_stack->data.block.cleanup_ptr
3184 = block_stack->next->data.block.cleanup_ptr;
3188 /* True if we are currently emitting insns in an area of output code
3189 that is controlled by a conditional expression. This is used by
3190 the cleanup handling code to generate conditional cleanup actions. */
3193 conditional_context ()
3195 return block_stack && block_stack->data.block.conditional_code;
3198 /* Mark top block of block_stack as not for an implicit binding for an
3199 exception region. This is only ever done by expand_eh_region_end
3200 to let expand_end_bindings know that it is being called explicitly
3201 to end the binding layer for just the binding layer associated with
3202 the exception region; otherwise expand_end_bindings would try to
3203 end all implicit binding layers for exception regions, and then
3204 one normal binding layer. */
3206 void
3207 mark_block_as_not_eh_region ()
3209 block_stack->data.block.exception_region = 0;
3212 /* True if the top block of block_stack was marked as for an exception
3213 region by mark_block_as_eh_region. */
3216 is_eh_region ()
3218 return block_stack && block_stack->data.block.exception_region;
3221 /* Given a pointer to a BLOCK node, save a pointer to the most recently
3222 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3223 BLOCK node. */
3225 void
3226 remember_end_note (block)
3227 register tree block;
3229 BLOCK_END_NOTE (block) = last_block_end_note;
3230 last_block_end_note = NULL_RTX;
3233 /* Generate RTL code to terminate a binding contour.
3234 VARS is the chain of VAR_DECL nodes
3235 for the variables bound in this contour.
3236 MARK_ENDS is nonzero if we should put a note at the beginning
3237 and end of this binding contour.
3239 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3240 (That is true automatically if the contour has a saved stack level.) */
3242 void
3243 expand_end_bindings (vars, mark_ends, dont_jump_in)
3244 tree vars;
3245 int mark_ends;
3246 int dont_jump_in;
3248 register struct nesting *thisblock;
3249 register tree decl;
3251 while (block_stack->data.block.exception_region)
3253 /* Because we don't need or want a new temporary level and
3254 because we didn't create one in expand_eh_region_start,
3255 create a fake one now to avoid removing one in
3256 expand_end_bindings. */
3257 push_temp_slots ();
3259 block_stack->data.block.exception_region = 0;
3261 expand_end_bindings (NULL_TREE, 0, 0);
3264 if (output_bytecode)
3266 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
3267 return;
3270 /* Since expand_eh_region_start does an expand_start_bindings, we
3271 have to first end all the bindings that were created by
3272 expand_eh_region_start. */
3274 thisblock = block_stack;
3276 if (warn_unused)
3277 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3278 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3279 && ! DECL_IN_SYSTEM_HEADER (decl))
3280 warning_with_decl (decl, "unused variable `%s'");
3282 if (thisblock->exit_label)
3284 do_pending_stack_adjust ();
3285 emit_label (thisblock->exit_label);
3288 /* If necessary, make a handler for nonlocal gotos taking
3289 place in the function calls in this block. */
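/* Such a handler is what makes a GNU C nonlocal goto work, e.g.

        void f ()
        {
          void h () { goto failure; }
          g (h);
          return;
        failure: ...;
        }

   If g calls h, the `goto failure' re-enters f's frame by jumping
   to this handler. */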
3290 if (function_call_count != thisblock->data.block.function_call_count
3291 && nonlocal_labels
3292 /* Make handler for outermost block
3293 if there were any nonlocal gotos to this function. */
3294 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3295 /* Make handler for inner block if it has something
3296 special to do when you jump out of it. */
3297 : (thisblock->data.block.cleanups != 0
3298 || thisblock->data.block.stack_level != 0)))
3300 tree link;
3301 rtx afterward = gen_label_rtx ();
3302 rtx handler_label = gen_label_rtx ();
3303 rtx save_receiver = gen_reg_rtx (Pmode);
3304 rtx insns;
3306 /* Don't let jump_optimize delete the handler. */
3307 LABEL_PRESERVE_P (handler_label) = 1;
3309 /* Record the handler address in the stack slot for that purpose,
3310 during this block, saving and restoring the outer value. */
3311 if (thisblock->next != 0)
3313 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
3315 start_sequence ();
3316 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
3317 insns = get_insns ();
3318 end_sequence ();
3319 emit_insns_before (insns, thisblock->data.block.first_insn);
3322 start_sequence ();
3323 emit_move_insn (nonlocal_goto_handler_slot,
3324 gen_rtx (LABEL_REF, Pmode, handler_label));
3325 insns = get_insns ();
3326 end_sequence ();
3327 emit_insns_before (insns, thisblock->data.block.first_insn);
3329 /* Jump around the handler; it runs only when specially invoked. */
3330 emit_jump (afterward);
3331 emit_label (handler_label);
3333 #ifdef HAVE_nonlocal_goto
3334 if (! HAVE_nonlocal_goto)
3335 #endif
3336 /* First adjust our frame pointer to its actual value. It was
3337 previously set to the start of the virtual area corresponding to
3338 the stacked variables when we branched here and now needs to be
3339 adjusted to the actual hardware fp value.
3341 Assignments to virtual registers are converted by
3342 instantiate_virtual_regs into the corresponding assignment
3343 to the underlying register (fp in this case) that makes
3344 the original assignment true.
3345 So the following insn will actually be
3346 decrementing fp by STARTING_FRAME_OFFSET. */
3347 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3349 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3350 if (fixed_regs[ARG_POINTER_REGNUM])
3352 #ifdef ELIMINABLE_REGS
3353 /* If the argument pointer can be eliminated in favor of the
3354 frame pointer, we don't need to restore it. We assume here
3355 that if such an elimination is present, it can always be used.
3356 This is the case on all known machines; if we don't make this
3357 assumption, we do unnecessary saving on many machines. */
3358 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3359 int i;
3361 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3362 if (elim_regs[i].from == ARG_POINTER_REGNUM
3363 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3364 break;
3366 if (i == sizeof elim_regs / sizeof elim_regs[0])
3367 #endif
3369 /* Now restore our arg pointer from the address at which it
3370 was saved in our stack frame.
3371 If there hasn't been space allocated for it yet, make
3372 some now. */
3373 if (arg_pointer_save_area == 0)
3374 arg_pointer_save_area
3375 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3376 emit_move_insn (virtual_incoming_args_rtx,
3377 /* We need a pseudo here, or else
3378 instantiate_virtual_regs_1 complains. */
3379 copy_to_reg (arg_pointer_save_area));
3382 #endif
3384 #ifdef HAVE_nonlocal_goto_receiver
3385 if (HAVE_nonlocal_goto_receiver)
3386 emit_insn (gen_nonlocal_goto_receiver ());
3387 #endif
3389 /* The handler expects the desired label address in the static chain
3390 register. It tests the address and does an appropriate jump
3391 to whatever label is desired. */
3392 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
3393 /* Skip any labels we shouldn't be able to jump to from here. */
3394 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3396 rtx not_this = gen_label_rtx ();
3397 rtx this = gen_label_rtx ();
3398 do_jump_if_equal (static_chain_rtx,
3399 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
3400 this, 0);
3401 emit_jump (not_this);
3402 emit_label (this);
3403 expand_goto (TREE_VALUE (link));
3404 emit_label (not_this);
3406 /* If label is not recognized, abort. */
3407 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
3408 VOIDmode, 0);
3409 emit_barrier ();
3410 emit_label (afterward);
3413 /* Don't allow jumping into a block that has cleanups or a stack level. */
3414 if (dont_jump_in
3415 || thisblock->data.block.stack_level != 0
3416 || thisblock->data.block.cleanups != 0)
3418 struct label_chain *chain;
3420 /* Any labels in this block are no longer valid to go to.
3421 Mark them to cause an error message. */
3422 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3424 DECL_TOO_LATE (chain->label) = 1;
3425 /* If any goto without a fixup came to this label,
3426 that must be an error, because gotos without fixups
3427 come from outside all saved stack-levels and all cleanups. */
3428 if (TREE_ADDRESSABLE (chain->label))
3429 error_with_decl (chain->label,
3430 "label `%s' used before containing binding contour");
3434 /* Restore stack level in effect before the block
3435 (only if variable-size objects allocated). */
3436 /* Perform any cleanups associated with the block. */
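/* A saved stack level exists e.g. when the block allocated a
   variable-sized object:

        { int n = f (); char buf[n]; ... }

   On exit the stack pointer must be restored so BUF's space is
   freed. */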
3438 if (thisblock->data.block.stack_level != 0
3439 || thisblock->data.block.cleanups != 0)
3441 /* Only clean up here if this point can actually be reached. */
3442 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3444 /* Don't let cleanups affect ({...}) constructs. */
3445 int old_expr_stmts_for_value = expr_stmts_for_value;
3446 rtx old_last_expr_value = last_expr_value;
3447 tree old_last_expr_type = last_expr_type;
3448 expr_stmts_for_value = 0;
3450 /* Do the cleanups. */
3451 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3452 if (reachable)
3453 do_pending_stack_adjust ();
3455 expr_stmts_for_value = old_expr_stmts_for_value;
3456 last_expr_value = old_last_expr_value;
3457 last_expr_type = old_last_expr_type;
3459 /* Restore the stack level. */
3461 if (reachable && thisblock->data.block.stack_level != 0)
3463 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3464 thisblock->data.block.stack_level, NULL_RTX);
3465 if (nonlocal_goto_handler_slot != 0)
3466 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3467 NULL_RTX);
3470 /* Any gotos out of this block must also do these things.
3471 Also report any gotos with fixups that came to labels in this
3472 level. */
3473 fixup_gotos (thisblock,
3474 thisblock->data.block.stack_level,
3475 thisblock->data.block.cleanups,
3476 thisblock->data.block.first_insn,
3477 dont_jump_in);
3480 /* Mark the beginning and end of the scope if requested.
3481 We do this now, after running cleanups on the variables
3482 just going out of scope, so they are in scope for their cleanups. */
3484 if (mark_ends)
3485 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3486 else
3487 /* Get rid of the beginning-mark if we don't make an end-mark. */
3488 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3490 /* If doing stupid register allocation, make sure lives of all
3491 register variables declared here extend thru end of scope. */
3493 if (obey_regdecls)
3494 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3496 rtx rtl = DECL_RTL (decl);
3497 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3498 use_variable (rtl);
3501 /* Restore the temporary level of TARGET_EXPRs. */
3502 target_temp_slot_level = thisblock->data.block.target_temp_slot_level;
3504 /* Restore block_stack level for containing block. */
3506 stack_block_stack = thisblock->data.block.innermost_stack_block;
3507 POPSTACK (block_stack);
3509 /* Pop the stack slot nesting and free any slots at this level. */
3510 pop_temp_slots ();
3514 /* End a binding contour.
3515 VARS is the chain of VAR_DECL nodes for the variables bound
3516 in this contour. MARK_ENDS is nonzero if we should put a note
3517 at the beginning and end of this binding contour.
3518 DONT_JUMP_IN is nonzero if it is not valid to jump into this
3519 contour. */
3521 static void
3522 bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3523 tree vars;
3524 int mark_ends;
3525 int dont_jump_in;
3527 struct nesting *thisbind = nesting_stack;
3528 tree decl;
3530 if (warn_unused)
3531 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3532 if (! TREE_USED (TREE_VALUE (decl)) && TREE_CODE (TREE_VALUE (decl)) == VAR_DECL)
3533 warning_with_decl (TREE_VALUE (decl), "unused variable `%s'");
3535 if (thisbind->exit_label)
3536 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));
3538 /* Pop block/bindings off stack */
3539 POPSTACK (block_stack);
3542 /* Generate RTL for the automatic variable declaration DECL.
3543 (Other kinds of declarations are simply ignored if seen here.) */
3545 void
3546 expand_decl (decl)
3547 register tree decl;
3549 struct nesting *thisblock = block_stack;
3550 tree type;
3552 if (output_bytecode)
3554 bc_expand_decl (decl, 0);
3555 return;
3558 type = TREE_TYPE (decl);
3560 /* Only automatic variables need any expansion done.
3561 Static and external variables, and external functions,
3562 will be handled by `assemble_variable' (called from finish_decl).
3563 TYPE_DECL and CONST_DECL require nothing.
3564 PARM_DECLs are handled in `assign_parms'. */
3566 if (TREE_CODE (decl) != VAR_DECL)
3567 return;
3568 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3569 return;
3571 /* Create the RTL representation for the variable. */
3573 if (type == error_mark_node)
3574 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3575 else if (DECL_SIZE (decl) == 0)
3576 /* Variable with incomplete type. */
3578 if (DECL_INITIAL (decl) == 0)
3579 /* Error message was already done; now avoid a crash. */
3580 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3581 else
3582 /* An initializer is going to decide the size of this array.
3583 Until we know the size, represent its address with a reg. */
3584 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3585 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
3587 else if (DECL_MODE (decl) != BLKmode
3588 /* If -ffloat-store, don't put explicit float vars
3589 into regs. */
3590 && !(flag_float_store
3591 && TREE_CODE (type) == REAL_TYPE)
3592 && ! TREE_THIS_VOLATILE (decl)
3593 && ! TREE_ADDRESSABLE (decl)
3594 && (DECL_REGISTER (decl) || ! obey_regdecls))
3596 /* Automatic variable that can go in a register. */
3597 int unsignedp = TREE_UNSIGNED (type);
3598 enum machine_mode reg_mode
3599 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3601 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3602 mark_user_reg (DECL_RTL (decl));
3604 if (TREE_CODE (type) == POINTER_TYPE)
3605 mark_reg_pointer (DECL_RTL (decl),
3606 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3607 / BITS_PER_UNIT));
3610 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3611 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3612 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3613 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3614 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
3616 /* Variable of fixed size that goes on the stack. */
3617 rtx oldaddr = 0;
3618 rtx addr;
3620 /* If we previously made RTL for this decl, it must be an array
3621 whose size was determined by the initializer.
3622 The old address was a register; set that register now
3623 to the proper address. */
3624 if (DECL_RTL (decl) != 0)
3626 if (GET_CODE (DECL_RTL (decl)) != MEM
3627 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3628 abort ();
3629 oldaddr = XEXP (DECL_RTL (decl), 0);
3632 DECL_RTL (decl)
3633 = assign_stack_temp (DECL_MODE (decl),
3634 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3635 + BITS_PER_UNIT - 1)
3636 / BITS_PER_UNIT),
3637 1);
3638 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3640 /* Set alignment we actually gave this decl. */
3641 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3642 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3644 if (oldaddr)
3646 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3647 if (addr != oldaddr)
3648 emit_move_insn (oldaddr, addr);
3651 /* If this is a memory ref that contains aggregate components,
3652 mark it as such for cse and loop optimize. */
3653 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3654 #if 0
3655 /* If this is in memory because of -ffloat-store,
3656 set the volatile bit, to prevent optimizations from
3657 undoing the effects. */
3658 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3659 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3660 #endif
3662 else
3663 /* Dynamic-size object: must push space on the stack. */
3665 rtx address, size;
3667 /* Record the stack pointer on entry to the block, if we have
3668 not already done so. */
3669 if (thisblock->data.block.stack_level == 0)
3671 do_pending_stack_adjust ();
3672 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3673 &thisblock->data.block.stack_level,
3674 thisblock->data.block.first_insn);
3675 stack_block_stack = thisblock;
3678 /* Compute the variable's size, in bytes. */
3679 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3680 DECL_SIZE (decl),
3681 size_int (BITS_PER_UNIT)),
3682 NULL_RTX, VOIDmode, 0);
3683 free_temp_slots ();
3685 /* Allocate space on the stack for the variable. Note that
3686 DECL_ALIGN says how the variable is to be aligned and we
3687 cannot use it to conclude anything about the alignment of
3688 the size. */
3689 address = allocate_dynamic_stack_space (size, NULL_RTX,
3690 TYPE_ALIGN (TREE_TYPE (decl)));
3692 /* Reference the variable indirect through that rtx. */
3693 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
3695 /* If this is a memory ref that contains aggregate components,
3696 mark it as such for cse and loop optimize. */
3697 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3699 /* Indicate the alignment we actually gave this variable. */
3700 #ifdef STACK_BOUNDARY
3701 DECL_ALIGN (decl) = STACK_BOUNDARY;
3702 #else
3703 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3704 #endif
3707 if (TREE_THIS_VOLATILE (decl))
3708 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3709 #if 0 /* A variable is not necessarily unchanging
3710 just because it is const. RTX_UNCHANGING_P
3711 means no change in the function,
3712 not merely no change in the variable's scope.
3713 It is correct to set RTX_UNCHANGING_P if the variable's scope
3714 is the whole function. There's no convenient way to test that. */
3715 if (TREE_READONLY (decl))
3716 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3717 #endif
3719 /* If doing stupid register allocation, make sure life of any
3720 register variable starts here, at the start of its scope. */
3722 if (obey_regdecls)
3723 use_variable (DECL_RTL (decl));
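/* Illustrative sketch, not part of the original source (f is
   hypothetical): the three allocation strategies expand_decl
   chooses between, shown on sample declarations.  */
#if 0
void
example ()
{
  register int i;   /* Non-BLKmode, not volatile or addressable:
                       a pseudo register from gen_reg_rtx.          */
  char buf[128];    /* DECL_SIZE is an INTEGER_CST: a fixed slot
                       from assign_stack_temp.                      */
  int n = f ();
  char vla[n];      /* Dynamic size: the stack level is saved and
                       allocate_dynamic_stack_space pushes space.   */
}
#endif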
3727 /* Generate code for the automatic variable declaration DECL. For
3728 most variables this just means we give it a stack offset. The
3729 compiler sometimes emits cleanups without variables and we will
3730 have to deal with those too. */
3732 static void
3733 bc_expand_decl (decl, cleanup)
3734 tree decl;
3735 tree cleanup;
3737 tree type;
3739 if (!decl)
3741 /* A cleanup with no variable. */
3742 if (!cleanup)
3743 abort ();
3745 return;
3748 /* Only auto variables need any work. */
3749 if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3750 return;
3752 type = TREE_TYPE (decl);
3754 if (type == error_mark_node)
3755 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3757 else if (DECL_SIZE (decl) == 0)
3759 /* Variable with incomplete type. The stack offset herein will be
3760 fixed later in expand_decl_init. */
3761 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3763 else if (TREE_CONSTANT (DECL_SIZE (decl)))
3765 DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
3766 DECL_ALIGN (decl));
3768 else
3769 DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
3772 /* Emit code to perform the initialization of a declaration DECL. */
3774 void
3775 expand_decl_init (decl)
3776 tree decl;
3778 int was_used = TREE_USED (decl);
3780 if (output_bytecode)
3782 bc_expand_decl_init (decl);
3783 return;
3786 /* If this is a CONST_DECL, we don't have to generate any code, but
3787 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3788 to be set while in the obstack containing the constant. If we don't
3789 do this, we can lose if we have functions nested three deep and the middle
3790 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3791 the innermost function is the first to expand that STRING_CST. */
3792 if (TREE_CODE (decl) == CONST_DECL)
3794 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3795 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3796 EXPAND_INITIALIZER);
3797 return;
3800 if (TREE_STATIC (decl))
3801 return;
3803 /* Compute and store the initial value now. */
3805 if (DECL_INITIAL (decl) == error_mark_node)
3807 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3808 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3809 || code == POINTER_TYPE)
3810 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3811 0, 0);
3812 emit_queue ();
3814 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3816 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3817 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3818 emit_queue ();
3821 /* Don't let the initialization count as "using" the variable. */
3822 TREE_USED (decl) = was_used;
3824 /* Free any temporaries we made while initializing the decl. */
3825 preserve_temp_slots (NULL_RTX);
3826 free_temp_slots ();
3829 /* Expand initialization for variable-sized types. Allocate array
3830 using newlocalSI and set local variable, which is a pointer to the
3831 storage. */
3833 static void
3834 bc_expand_variable_local_init (decl)
3835 tree decl;
3837 /* Evaluate size expression and coerce to SI */
3838 bc_expand_expr (DECL_SIZE (decl));
3840 /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
3841 no coercion is necessary (?) */
3843 /* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
3844 TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */
3846 /* Emit code to allocate array */
3847 bc_emit_instruction (newlocalSI);
3849 /* Store array pointer in local variable. This is the only instance
3850 where we actually want the address of the pointer to the
3851 variable-size block, rather than the pointer itself. We avoid
3852 using expand_address() since that would cause the pointer to be
3853 pushed rather than its address. Hence the hard-coded reference;
3854 notice also that the variable is always local (no global
3855 variable-size type variables). */
3857 bc_load_localaddr (DECL_RTL (decl));
3858 bc_emit_instruction (storeP);
3862 /* Emit code to initialize a declaration. */
3864 static void
3865 bc_expand_decl_init (decl)
3866 tree decl;
3868 int org_stack_depth;
3870 /* Static initializers are handled elsewhere. */
3872 if (TREE_STATIC (decl))
3873 return;
3875 /* Remember the original stack depth. */
3876 org_stack_depth = stack_depth;
3878 /* If the type is variable-size, we first create its space (we ASSUME
3879 it CAN'T be static). We do this regardless of whether there's an
3880 initializer assignment or not. */
3882 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
3883 bc_expand_variable_local_init (decl);
3885 /* Expand initializer assignment */
3886 if (DECL_INITIAL (decl) == error_mark_node)
3888 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3890 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3891 || code == POINTER_TYPE)
3893 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3895 else if (DECL_INITIAL (decl))
3896 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3898 /* Restore stack depth */
3899 if (org_stack_depth > stack_depth)
3900 abort ();
3902 bc_adjust_stack (stack_depth - org_stack_depth);
3906 /* CLEANUP is an expression to be executed at exit from this binding contour;
3907 for example, in C++, it might call the destructor for this variable.
3909 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3910 CLEANUP multiple times, and have the correct semantics. This
3911 happens in exception handling, and for gotos, returns, and breaks
3912 that leave the current scope.
3914 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3915 that is not associated with any particular variable. */
3917 int
3918 expand_decl_cleanup (decl, cleanup)
3919 tree decl, cleanup;
3921 struct nesting *thisblock = block_stack;
3923 /* Error if we are not in any block. */
3924 if (thisblock == 0)
3925 return 0;
3927 /* Record the cleanup if there is one. */
3929 if (cleanup != 0)
3931 tree t;
3932 rtx seq;
3933 tree *cleanups = &thisblock->data.block.cleanups;
3934 int cond_context = conditional_context ();
3936 if (cond_context)
3938 rtx flag = gen_reg_rtx (word_mode);
3939 rtx set_flag_0;
3940 tree cond;
3942 start_sequence ();
3943 emit_move_insn (flag, const0_rtx);
3944 set_flag_0 = get_insns ();
3945 end_sequence ();
3947 thisblock->data.block.last_unconditional_cleanup
3948 = emit_insns_after (set_flag_0,
3949 thisblock->data.block.last_unconditional_cleanup);
3951 emit_move_insn (flag, const1_rtx);
3953 /* All cleanups must be on the function_obstack. */
3954 push_obstacks_nochange ();
3955 resume_temporary_allocation ();
3957 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3958 DECL_RTL (cond) = flag;
3960 /* Conditionalize the cleanup. */
3961 cleanup = build (COND_EXPR, void_type_node,
3962 truthvalue_conversion (cond),
3963 cleanup, integer_zero_node);
3964 cleanup = fold (cleanup);
3966 pop_obstacks ();
3968 cleanups = thisblock->data.block.cleanup_ptr;
3971 /* All cleanups must be on the function_obstack. */
3972 push_obstacks_nochange ();
3973 resume_temporary_allocation ();
3974 cleanup = unsave_expr (cleanup);
3975 pop_obstacks ();
3977 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
3979 if (! cond_context)
3980 /* If this block has a cleanup, it belongs in stack_block_stack. */
3981 stack_block_stack = thisblock;
3983 if (cond_context)
3985 start_sequence ();
3988 /* If this was optimized so that there is no exception region for the
3989 cleanup, then mark the TREE_LIST node, so that we can later tell
3990 if we need to call expand_eh_region_end. */
3991 if (! using_eh_for_cleanups_p
3992 || expand_eh_region_start_tree (decl, cleanup))
3993 TREE_ADDRESSABLE (t) = 1;
3994 /* If that started a new EH region, we're in a new block. */
3995 thisblock = block_stack;
3997 if (cond_context)
3999 seq = get_insns ();
4000 end_sequence ();
4001 if (seq)
4002 thisblock->data.block.last_unconditional_cleanup
4003 = emit_insns_after (seq,
4004 thisblock->data.block.last_unconditional_cleanup);
4006 else
4008 thisblock->data.block.last_unconditional_cleanup
4009 = get_last_insn ();
4010 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4013 return 1;
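/* Illustrative sketch, not part of the original source: why a
   cleanup must sometimes be conditionalized.  In the hypothetical
   C++-style fragment below the temporary's destructor may run only
   on the path that constructed it, so the cleanup is wrapped in a
   COND_EXPR testing a runtime flag: the flag is zeroed at the last
   unconditional point and set to 1 just before construction.  */
#if 0
  r = cond ? f (T ()) : 0;   /* T's cleanup runs iff cond held.     */
#endif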
4016 /* Like expand_decl_cleanup, but suppress generating an exception handler
4017 to perform the cleanup. */
4019 int
4020 expand_decl_cleanup_no_eh (decl, cleanup)
4021 tree decl, cleanup;
4023 int save_eh = using_eh_for_cleanups_p;
4024 using_eh_for_cleanups_p = 0;
4025 expand_decl_cleanup (decl, cleanup);
4026 using_eh_for_cleanups_p = save_eh;
4029 /* Arrange for the top element of the dynamic cleanup chain to be
4030 popped if we exit the current binding contour. DECL is the
4031 associated declaration, if any, otherwise NULL_TREE. If the
4032 current contour is left via an exception, then __sjthrow will pop
4033 the top element off the dynamic cleanup chain. The code that
4034 avoids doing the action we push into the cleanup chain in the
4035 exceptional case is contained in expand_cleanups.
4037 This routine is only used by expand_eh_region_start, and that is
4038 the only way in which an exception region should be started. This
4039 routine is only used when using the setjmp/longjmp codegen method
4040 for exception handling. */
4042 int
4043 expand_dcc_cleanup (decl)
4044 tree decl;
4046 struct nesting *thisblock = block_stack;
4047 tree cleanup;
4049 /* Error if we are not in any block. */
4050 if (thisblock == 0)
4051 return 0;
4053 /* Record the cleanup for the dynamic handler chain. */
4055 /* All cleanups must be on the function_obstack. */
4056 push_obstacks_nochange ();
4057 resume_temporary_allocation ();
4058 cleanup = make_node (POPDCC_EXPR);
4059 pop_obstacks ();
4061 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4062 thisblock->data.block.cleanups
4063 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4065 /* If this block has a cleanup, it belongs in stack_block_stack. */
4066 stack_block_stack = thisblock;
4067 return 1;
4070 /* Arrange for the top element of the dynamic handler chain to be
4071 popped if we exit the current binding contour. DECL is the
4072 associated declaration, if any, otherwise NULL_TREE. If the current
4073 contour is left via an exception, then __sjthrow will pop the top
4074 element off the dynamic handler chain. The code that avoids doing
4075 the action we push into the handler chain in the exceptional case
4076 is contained in expand_cleanups.
4078 This routine is only used by expand_eh_region_start, and that is
4079 the only way in which an exception region should be started. This
4080 routine is only used when using the setjmp/longjmp codegen method
4081 for exception handling. */
4083 int
4084 expand_dhc_cleanup (decl)
4085 tree decl;
4087 struct nesting *thisblock = block_stack;
4088 tree cleanup;
4090 /* Error if we are not in any block. */
4091 if (thisblock == 0)
4092 return 0;
4094 /* Record the cleanup for the dynamic handler chain. */
4096 /* All cleanups must be on the function_obstack. */
4097 push_obstacks_nochange ();
4098 resume_temporary_allocation ();
4099 cleanup = make_node (POPDHC_EXPR);
4100 pop_obstacks ();
4102 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4103 thisblock->data.block.cleanups
4104 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4106 /* If this block has a cleanup, it belongs in stack_block_stack. */
4107 stack_block_stack = thisblock;
4108 return 1;
4111 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4112 DECL_ELTS is the list of elements that belong to DECL's type.
4113 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4115 void
4116 expand_anon_union_decl (decl, cleanup, decl_elts)
4117 tree decl, cleanup, decl_elts;
4119 struct nesting *thisblock = block_stack;
4120 rtx x;
4122 expand_decl (decl);
4123 expand_decl_cleanup (decl, cleanup);
4124 x = DECL_RTL (decl);
4126 while (decl_elts)
4128 tree decl_elt = TREE_VALUE (decl_elts);
4129 tree cleanup_elt = TREE_PURPOSE (decl_elts);
4130 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4132 /* Propagate the union's alignment to the elements. */
4133 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4135 /* If the element has BLKmode and the union doesn't, the union is
4136 aligned such that the element doesn't need to have BLKmode, so
4137 change the element's mode to the appropriate one for its size. */
4138 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4139 DECL_MODE (decl_elt) = mode
4140 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
4141 MODE_INT, 1);
4143 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4144 instead create a new MEM rtx with the proper mode. */
4145 if (GET_CODE (x) == MEM)
4147 if (mode == GET_MODE (x))
4148 DECL_RTL (decl_elt) = x;
4149 else
4151 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
4152 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
4153 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
4156 else if (GET_CODE (x) == REG)
4158 if (mode == GET_MODE (x))
4159 DECL_RTL (decl_elt) = x;
4160 else
4161 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
4163 else
4164 abort ();
4166 /* Record the cleanup if there is one. */
4168 if (cleanup != 0)
4169 thisblock->data.block.cleanups
4170 = temp_tree_cons (decl_elt, cleanup_elt,
4171 thisblock->data.block.cleanups);
4173 decl_elts = TREE_CHAIN (decl_elts);
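/* Illustrative sketch, not part of the original source: for a
   hypothetical anonymous union, every element is given RTL that
   aliases the union's own storage, re-expressed in the element's
   mode.  */
#if 0
  union { int i; float f; };   /* If the union got (MEM:SI addr),
                                  i shares it and f gets
                                  (MEM:SF addr) at the same address;
                                  if the union got a REG, the
                                  elements become SUBREGs of it.    */
#endif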
4177 /* Expand a list of cleanups LIST.
4178 Elements may be expressions or may be nested lists.
4180 If DONT_DO is nonnull, then any list-element
4181 whose TREE_PURPOSE matches DONT_DO is omitted.
4182 This is sometimes used to avoid a cleanup associated with
4183 a value that is being returned out of the scope.
4185 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4186 goto and handle protection regions specially in that case.
4188 If REACHABLE, we emit code; otherwise we just inform the exception
4189 handling code about this finalization. */
4191 static void
4192 expand_cleanups (list, dont_do, in_fixup, reachable)
4193 tree list;
4194 tree dont_do;
4195 int in_fixup;
4196 int reachable;
4198 tree tail;
4199 for (tail = list; tail; tail = TREE_CHAIN (tail))
4200 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4202 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4203 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4204 else
4206 if (! in_fixup)
4208 tree cleanup = TREE_VALUE (tail);
4210 /* See expand_d{h,c}c_cleanup for why we avoid this. */
4211 if (TREE_CODE (cleanup) != POPDHC_EXPR
4212 && TREE_CODE (cleanup) != POPDCC_EXPR
4213 /* See expand_eh_region_start_tree for this case. */
4214 && ! TREE_ADDRESSABLE (tail))
4216 cleanup = protect_with_terminate (cleanup);
4217 expand_eh_region_end (cleanup);
4221 if (reachable)
4223 /* Cleanups may be run multiple times. For example,
4224 when exiting a binding contour, we expand the
4225 cleanups associated with that contour. When a goto
4226 within that binding contour has a target outside that
4227 contour, it will expand all cleanups from its scope to
4228 the target. Though the cleanups are expanded multiple
4229 times, the control paths are non-overlapping so the
4230 cleanups will not be executed twice. */
4231 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4232 free_temp_slots ();
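/* Illustrative sketch, not part of the original source (T is a
   hypothetical type with a cleanup): the same cleanup list may be
   expanded more than once, but only on disjoint control paths, so
   each cleanup still executes at most once per run.  */
#if 0
  {
    T t;           /* Registers a cleanup for t.                    */
    if (cond)
      goto out;    /* Fixup path: the cleanup is expanded here ...  */
  }                /* ... and again here, on the fall-through.      */
 out: ;
#endif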
4238 /* Mark the context we are emitting RTL for as a conditional
4239 context, so that any cleanup actions we register with
4240 expand_decl_init will be properly conditionalized when those
4241 cleanup actions are later performed. Must be called before any
4242 expression (tree) that is within a conditional context is expanded. */
4244 void
4245 start_cleanup_deferal ()
4247 /* block_stack can be NULL if we are inside the parameter list. It is
4248 OK to do nothing, because cleanups aren't possible here. */
4249 if (block_stack)
4250 ++block_stack->data.block.conditional_code;
4253 /* Mark the end of a conditional region of code. Because cleanup
4254 deferrals may be nested, we may still be in a conditional region
4255 after we end the currently deferred cleanups; only after we end all
4256 deferred cleanups are we back in unconditional code. */
4258 void
4259 end_cleanup_deferal ()
4261 /* block_stack can be NULL if we are inside the parameter list. It is
4262 OK to do nothing, because cleanups aren't possible here. */
4263 if (block_stack)
4264 --block_stack->data.block.conditional_code;
4267 /* Move all cleanups from the current block_stack
4268 to the containing block_stack, where they are assumed to
4269 have been created. If anything can cause a temporary to
4270 be created, but not expanded for more than one level of
4271 block_stacks, then this code will have to change. */
4273 void
4274 move_cleanups_up ()
4276 struct nesting *block = block_stack;
4277 struct nesting *outer = block->next;
4279 outer->data.block.cleanups
4280 = chainon (block->data.block.cleanups,
4281 outer->data.block.cleanups);
4282 block->data.block.cleanups = 0;
4285 tree
4286 last_cleanup_this_contour ()
4288 if (block_stack == 0)
4289 return 0;
4291 return block_stack->data.block.cleanups;
4294 /* Return 1 if there are any pending cleanups at this point.
4295 If THIS_CONTOUR is nonzero, check the current contour as well.
4296 Otherwise, look only at the contours that enclose this one. */
4298 int
4299 any_pending_cleanups (this_contour)
4300 int this_contour;
4302 struct nesting *block;
4304 if (block_stack == 0)
4305 return 0;
4307 if (this_contour && block_stack->data.block.cleanups != NULL)
4308 return 1;
4309 if (block_stack->data.block.cleanups == 0
4310 && block_stack->data.block.outer_cleanups == 0)
4311 return 0;
4313 for (block = block_stack->next; block; block = block->next)
4314 if (block->data.block.cleanups != 0)
4315 return 1;
4317 return 0;
4320 /* Enter a case (Pascal) or switch (C) statement.
4321 Push a block onto case_stack and nesting_stack
4322 to accumulate the case-labels that are seen
4323 and to record the labels generated for the statement.
4325 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4326 Otherwise, this construct is transparent for `exit_something'.
4328 EXPR is the index-expression to be dispatched on.
4329 TYPE is its nominal type. We could simply convert EXPR to this type,
4330 but instead we take short cuts. */
4332 void
4333 expand_start_case (exit_flag, expr, type, printname)
4334 int exit_flag;
4335 tree expr;
4336 tree type;
4337 char *printname;
4339 register struct nesting *thiscase = ALLOC_NESTING ();
4341 /* Make an entry on case_stack for the case we are entering. */
4343 thiscase->next = case_stack;
4344 thiscase->all = nesting_stack;
4345 thiscase->depth = ++nesting_depth;
4346 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4347 thiscase->data.case_stmt.case_list = 0;
4348 thiscase->data.case_stmt.index_expr = expr;
4349 thiscase->data.case_stmt.nominal_type = type;
4350 thiscase->data.case_stmt.default_label = 0;
4351 thiscase->data.case_stmt.num_ranges = 0;
4352 thiscase->data.case_stmt.printname = printname;
4353 thiscase->data.case_stmt.seenlabel = 0;
4354 case_stack = thiscase;
4355 nesting_stack = thiscase;
4357 if (output_bytecode)
4359 bc_expand_start_case (thiscase, expr, type, printname);
4360 return;
4363 do_pending_stack_adjust ();
4365 /* Make sure case_stmt.start points to something that won't
4366 need any transformation before expand_end_case. */
4367 if (GET_CODE (get_last_insn ()) != NOTE)
4368 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4370 thiscase->data.case_stmt.start = get_last_insn ();
4372 start_cleanup_deferal ();
4376 /* Enter a case statement. It is assumed that the caller has pushed
4377 the current context onto the case stack. */
4379 static void
4380 bc_expand_start_case (thiscase, expr, type, printname)
4381 struct nesting *thiscase;
4382 tree expr;
4383 tree type;
4384 char *printname;
4386 bc_expand_expr (expr);
4387 bc_expand_conversion (TREE_TYPE (expr), type);
4389 /* For cases, the skip is a place we jump to that's emitted after
4390 the size of the jump table is known. */
4392 thiscase->data.case_stmt.skip_label = gen_label_rtx ();
4393 bc_emit_bytecode (jump);
4394 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
4396 #ifdef DEBUG_PRINT_CODE
4397 fputc ('\n', stderr);
4398 #endif
4402 /* Start a "dummy case statement" within which case labels are invalid
4403 and are not connected to any larger real case statement.
4404 This can be used if you don't want to let a case statement jump
4405 into the middle of certain kinds of constructs. */
4407 void
4408 expand_start_case_dummy ()
4410 register struct nesting *thiscase = ALLOC_NESTING ();
4412 /* Make an entry on case_stack for the dummy. */
4414 thiscase->next = case_stack;
4415 thiscase->all = nesting_stack;
4416 thiscase->depth = ++nesting_depth;
4417 thiscase->exit_label = 0;
4418 thiscase->data.case_stmt.case_list = 0;
4419 thiscase->data.case_stmt.start = 0;
4420 thiscase->data.case_stmt.nominal_type = 0;
4421 thiscase->data.case_stmt.default_label = 0;
4422 thiscase->data.case_stmt.num_ranges = 0;
4423 case_stack = thiscase;
4424 nesting_stack = thiscase;
4425 start_cleanup_deferal ();
4428 /* End a dummy case statement. */
4430 void
4431 expand_end_case_dummy ()
4433 end_cleanup_deferal ();
4434 POPSTACK (case_stack);
4437 /* Return the data type of the index-expression
4438 of the innermost case statement, or null if none. */
4440 tree
4441 case_index_expr_type ()
4443 if (case_stack)
4444 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4445 return 0;
4448 /* Accumulate one case or default label inside a case or switch statement.
4449 VALUE is the value of the case (a null pointer, for a default label).
4450 The function CONVERTER, when applied to arguments T and V,
4451 converts the value V to the type T.
4453 If not currently inside a case or switch statement, return 1 and do
4454 nothing. The caller will print a language-specific error message.
4455 If VALUE is a duplicate or overlaps, return 2 and do nothing
4456 except store the (first) duplicate node in *DUPLICATE.
4457 If VALUE is out of range, return 3 and do nothing.
4458 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4459 Return 0 on success.
4461 Extended to handle range statements. */
4463 int
4464 pushcase (value, converter, label, duplicate)
4465 register tree value;
4466 tree (*converter) PROTO((tree, tree));
4467 register tree label;
4468 tree *duplicate;
4470 register struct case_node **l;
4471 register struct case_node *n;
4472 tree index_type;
4473 tree nominal_type;
4475 if (output_bytecode)
4476 return bc_pushcase (value, label);
4478 /* Fail if not inside a real case statement. */
4479 if (! (case_stack && case_stack->data.case_stmt.start))
4480 return 1;
4482 if (stack_block_stack
4483 && stack_block_stack->depth > case_stack->depth)
4484 return 5;
4486 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4487 nominal_type = case_stack->data.case_stmt.nominal_type;
4489 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4490 if (index_type == error_mark_node)
4491 return 0;
4493 /* Convert VALUE to the type in which the comparisons are nominally done. */
4494 if (value != 0)
4495 value = (*converter) (nominal_type, value);
4497 /* If this is the first label, warn if any insns have been emitted. */
4498 if (case_stack->data.case_stmt.seenlabel == 0)
4500 rtx insn;
4501 for (insn = case_stack->data.case_stmt.start;
4502 insn;
4503 insn = NEXT_INSN (insn))
4505 if (GET_CODE (insn) == CODE_LABEL)
4506 break;
4507 if (GET_CODE (insn) != NOTE
4508 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4510 warning ("unreachable code at beginning of %s",
4511 case_stack->data.case_stmt.printname);
4512 break;
4516 case_stack->data.case_stmt.seenlabel = 1;
4518 /* Fail if this value is out of range for the actual type of the index
4519 (which may be narrower than NOMINAL_TYPE). */
4520 if (value != 0 && ! int_fits_type_p (value, index_type))
4521 return 3;
4523 /* Fail if this is a duplicate or overlaps another entry. */
4524 if (value == 0)
4526 if (case_stack->data.case_stmt.default_label != 0)
4528 *duplicate = case_stack->data.case_stmt.default_label;
4529 return 2;
4531 case_stack->data.case_stmt.default_label = label;
4533 else
4534 return add_case_node (value, value, label, duplicate);
4536 expand_label (label);
4537 return 0;
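/* Illustrative sketch, not part of the original source (the
   diagnostics are hypothetical): how a front end might map the
   return-code protocol above onto error messages.  */
#if 0
  tree value, label, duplicate;
  switch (pushcase (value, convert, label, &duplicate))
    {
    case 1: error ("case label not within a switch statement"); break;
    case 2: error ("duplicate case value"); break;
    case 3: error ("case label value out of range"); break;
    case 5: error ("case label within scope of cleanup or variable array"); break;
    }
#endif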
4540 /* Like pushcase but this case applies to all values
4541 between VALUE1 and VALUE2 (inclusive).
4542 The return value is the same as that of pushcase
4543 but there is one additional error code:
4544 4 means the specified range was empty. */
4546 int
4547 pushcase_range (value1, value2, converter, label, duplicate)
4548 register tree value1, value2;
4549 tree (*converter) PROTO((tree, tree));
4550 register tree label;
4551 tree *duplicate;
4553 register struct case_node **l;
4554 register struct case_node *n;
4555 tree index_type;
4556 tree nominal_type;
4558 /* Fail if not inside a real case statement. */
4559 if (! (case_stack && case_stack->data.case_stmt.start))
4560 return 1;
4562 /* Fail if the range is empty. */
4563 if (tree_int_cst_lt (value2, value1))
4564 return 4;
4566 if (stack_block_stack
4567 && stack_block_stack->depth > case_stack->depth)
4568 return 5;
4570 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4571 nominal_type = case_stack->data.case_stmt.nominal_type;
4573 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4574 if (index_type == error_mark_node)
4575 return 0;
4577 /* If this is the first label, warn if any insns have been emitted. */
4578 if (case_stack->data.case_stmt.seenlabel == 0)
4580 rtx insn;
4581 for (insn = case_stack->data.case_stmt.start;
4582 insn;
4583 insn = NEXT_INSN (insn))
4585 if (GET_CODE (insn) == CODE_LABEL)
4586 break;
4587 if (GET_CODE (insn) != NOTE
4588 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4590 warning ("unreachable code at beginning of %s",
4591 case_stack->data.case_stmt.printname);
4592 break;
4596 case_stack->data.case_stmt.seenlabel = 1;
4598 /* Convert VALUEs to type in which the comparisons are nominally done. */
4599 if (value1 == 0) /* Negative infinity. */
4600 value1 = TYPE_MIN_VALUE (index_type);
4601 value1 = (*converter) (nominal_type, value1);
4603 if (value2 == 0) /* Positive infinity. */
4604 value2 = TYPE_MAX_VALUE (index_type);
4605 value2 = (*converter) (nominal_type, value2);
4607 /* Fail if these values are out of range. */
4608 if (! int_fits_type_p (value1, index_type))
4609 return 3;
4611 if (! int_fits_type_p (value2, index_type))
4612 return 3;
4614 return add_case_node (value1, value2, label, duplicate);
4617 /* Do the actual insertion of a case label for pushcase and pushcase_range
4618 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4619 slowdown for large switch statements. */
4621 static int
4622 add_case_node (low, high, label, duplicate)
4623 tree low, high;
4624 tree label;
4625 tree *duplicate;
4627 struct case_node *p, **q, *r;
4629 q = &case_stack->data.case_stmt.case_list;
4630 p = *q;
4632 while (r = *q)
4634 p = r;
4636 /* Keep going past elements distinctly greater than HIGH. */
4637 if (tree_int_cst_lt (high, p->low))
4638 q = &p->left;
4640 /* or distinctly less than LOW. */
4641 else if (tree_int_cst_lt (p->high, low))
4642 q = &p->right;
4644 else
4646 /* We have an overlap; this is an error. */
4647 *duplicate = p->code_label;
4648 return 2;
4652 /* Add this label to the chain, and succeed.
4653 Copy LOW, HIGH so they are on temporary rather than momentary
4654 obstack and will thus survive till the end of the case statement. */
4656 r = (struct case_node *) oballoc (sizeof (struct case_node));
4657 r->low = copy_node (low);
4659 /* If the bounds are equal, turn this into the one-value case. */
4661 if (tree_int_cst_equal (low, high))
4662 r->high = r->low;
4663 else
4665 r->high = copy_node (high);
4666 case_stack->data.case_stmt.num_ranges++;
4669 r->code_label = label;
4670 expand_label (label);
4672 *q = r;
4673 r->parent = p;
4674 r->left = 0;
4675 r->right = 0;
4676 r->balance = 0;
4678 while (p)
4680 struct case_node *s;
4682 if (r == p->left)
4684 int b;
4686 if (! (b = p->balance))
4687 /* Growth propagation from left side. */
4688 p->balance = -1;
4689 else if (b < 0)
4691 if (r->balance < 0)
4693 /* R-Rotation */
4694 if (p->left = s = r->right)
4695 s->parent = p;
4697 r->right = p;
4698 p->balance = 0;
4699 r->balance = 0;
4700 s = p->parent;
4701 p->parent = r;
4703 if (r->parent = s)
4705 if (s->left == p)
4706 s->left = r;
4707 else
4708 s->right = r;
4710 else
4711 case_stack->data.case_stmt.case_list = r;
4713 else
4714 /* r->balance == +1 */
4716 /* LR-Rotation */
4718 int b2;
4719 struct case_node *t = r->right;
4721 if (p->left = s = t->right)
4722 s->parent = p;
4724 t->right = p;
4725 if (r->right = s = t->left)
4726 s->parent = r;
4728 t->left = r;
4729 b = t->balance;
4730 b2 = b < 0;
4731 p->balance = b2;
4732 b2 = -b2 - b;
4733 r->balance = b2;
4734 t->balance = 0;
4735 s = p->parent;
4736 p->parent = t;
4737 r->parent = t;
4739 if (t->parent = s)
4741 if (s->left == p)
4742 s->left = t;
4743 else
4744 s->right = t;
4746 else
4747 case_stack->data.case_stmt.case_list = t;
4749 break;
4752 else
4754 /* p->balance == +1; growth of left side balances the node. */
4755 p->balance = 0;
4756 break;
4759 else
4760 /* r == p->right */
4762 int b;
4764 if (! (b = p->balance))
4765 /* Growth propagation from right side. */
4766 p->balance++;
4767 else if (b > 0)
4769 if (r->balance > 0)
4771 /* L-Rotation */
4773 if (p->right = s = r->left)
4774 s->parent = p;
4776 r->left = p;
4777 p->balance = 0;
4778 r->balance = 0;
4779 s = p->parent;
4780 p->parent = r;
4781 if (r->parent = s)
4783 if (s->left == p)
4784 s->left = r;
4785 else
4786 s->right = r;
4789 else
4790 case_stack->data.case_stmt.case_list = r;
4793 else
4794 /* r->balance == -1 */
4796 /* RL-Rotation */
4797 int b2;
4798 struct case_node *t = r->left;
4800 if (p->right = s = t->left)
4801 s->parent = p;
4803 t->left = p;
4805 if (r->left = s = t->right)
4806 s->parent = r;
4808 t->right = r;
4809 b = t->balance;
4810 b2 = b < 0;
4811 r->balance = b2;
4812 b2 = -b2 - b;
4813 p->balance = b2;
4814 t->balance = 0;
4815 s = p->parent;
4816 p->parent = t;
4817 r->parent = t;
4819 if (t->parent = s)
4821 if (s->left == p)
4822 s->left = t;
4823 else
4824 s->right = t;
4827 else
4828 case_stack->data.case_stmt.case_list = t;
4830 break;
4832 else
4834 /* p->balance == -1; growth of right side balances the node. */
4835 p->balance = 0;
4836 break;
4840 r = p;
4841 p = p->parent;
4844 return 0;
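/* Worked example, not part of the original source: inserting the
   case values 1, 2, 3 in that order.  After inserting 3, nodes 1
   and 2 both have balance +1, so the L-Rotation above fires with
   p = 1, r = 2:

          1                    2
           \                  / \
            2       ==>      1   3
             \
              3

   leaving every balance 0 and keeping later lookups logarithmic.  */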
4847 /* Accumulate one case or default label; VALUE is the value of the
4848 case, or nil for a default label. If not currently inside a case,
4849 return 1 and do nothing. If VALUE is a duplicate or overlaps, return
4850 2 and do nothing. If VALUE is out of range, return 3 and do nothing.
4851 Return 0 on success. This function is a leftover from the earlier
4852 bytecode compiler, which was based on gcc 1.37. It should be
4853 merged into pushcase. */
4855 static int
4856 bc_pushcase (value, label)
4857 tree value;
4858 tree label;
4860 struct nesting *thiscase = case_stack;
4861 struct case_node *case_label, *new_label;
4863 if (! thiscase)
4864 return 1;
4866 /* Fail if duplicate, overlap, or out of type range. */
4867 if (value)
4869 value = convert (thiscase->data.case_stmt.nominal_type, value);
4870 if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
4871 return 3;
4873 for (case_label = thiscase->data.case_stmt.case_list;
4874 case_label->left; case_label = case_label->left)
4875 if (! tree_int_cst_lt (case_label->left->high, value))
4876 break;
4878 if (case_label != thiscase->data.case_stmt.case_list
4879 && ! tree_int_cst_lt (case_label->high, value)
4880 || (case_label->left && ! tree_int_cst_lt (value, case_label->left->low)))
4881 return 2;
4883 new_label = (struct case_node *) oballoc (sizeof (struct case_node));
4884 new_label->low = new_label->high = copy_node (value);
4885 new_label->code_label = label;
4886 new_label->left = case_label->left;
4888 case_label->left = new_label;
4889 thiscase->data.case_stmt.num_ranges++;
4891 else
4893 if (thiscase->data.case_stmt.default_label)
4894 return 2;
4895 thiscase->data.case_stmt.default_label = label;
4898 expand_label (label);
4899 return 0;
4902 /* Returns the number of possible values of TYPE.
4903 Returns -1 if the number is unknown or variable.
4904 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4905 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4906 do not increase monotonically (there may be duplicates);
4907 to 1 if the values increase monotonically, but not always by 1;
4908 otherwise sets it to 0. */
4910 HOST_WIDE_INT
4911 all_cases_count (type, spareness)
4912 tree type;
4913 int *spareness;
4915 HOST_WIDE_INT count, count_high = 0;
4916 *spareness = 0;
4918 switch (TREE_CODE (type))
4920 tree t;
4921 case BOOLEAN_TYPE:
4922 count = 2;
4923 break;
4924 case CHAR_TYPE:
4925 count = 1 << BITS_PER_UNIT;
4926 break;
4927 default:
4928 case INTEGER_TYPE:
4929 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4930 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4931 return -1;
4932 else
4934 /* count
4935 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4936 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4937 but with overflow checking. */
4938 tree mint = TYPE_MIN_VALUE (type);
4939 tree maxt = TYPE_MAX_VALUE (type);
4940 HOST_WIDE_INT lo, hi;
4941 neg_double(TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4942 &lo, &hi);
4943 add_double(TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4944 lo, hi, &lo, &hi);
4945 add_double (lo, hi, 1, 0, &lo, &hi);
4946 if (hi != 0 || lo < 0)
4947 return -2;
4948 count = lo;
4950 break;
4951 case ENUMERAL_TYPE:
4952 count = 0;
4953 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4955 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4956 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4957 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4958 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4959 *spareness = 1;
4960 count++;
4962 if (*spareness == 1)
4964 tree prev = TREE_VALUE (TYPE_VALUES (type));
4965 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4967 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4969 *spareness = 2;
4970 break;
4972 prev = TREE_VALUE (t);
4977 return count;
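/* Worked example, not part of the original source, assuming a host
   where HOST_WIDE_INT is 32 bits: for an INTEGER_TYPE spanning the
   whole range, min = -2147483648 and max = 2147483647, the
   double-word arithmetic above yields max - min + 1 = 2**32, i.e.
   lo == 0 with hi == 1, so the count does not fit and -2 is
   returned instead of a wrapped-around 0.  */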
4981 #define BITARRAY_TEST(ARRAY, INDEX) \
4982 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4983 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4984 #define BITARRAY_SET(ARRAY, INDEX) \
4985 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4986 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
4988 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4989 with the case values we have seen, assuming the case expression
4990 has the given TYPE.
4991 SPARSENESS is as determined by all_cases_count.
4993 The time needed is proportional to COUNT, unless
4994 SPARSENESS is 2, in which case quadratic time is needed. */
4996 void
4997 mark_seen_cases (type, cases_seen, count, sparseness)
4998 tree type;
4999 unsigned char *cases_seen;
5000 long count;
5001 int sparseness;
5003 long i;
5005 tree next_node_to_try = NULL_TREE;
5006 long next_node_offset = 0;
5008 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
5009 tree val = make_node (INTEGER_CST);
5010 TREE_TYPE (val) = type;
5011 if (! root)
5012 ; /* Do nothing */
5013 else if (sparseness == 2)
5015 tree t;
5016 HOST_WIDE_INT xlo;
5018 /* This less efficient loop is only needed to handle
5019 duplicate case values (multiple enum constants
5020 with the same value). */
5021 TREE_TYPE (val) = TREE_TYPE (root->low);
5022 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5023 t = TREE_CHAIN (t), xlo++)
5025 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
5026 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
5027 n = root;
5030 /* Keep going past elements distinctly greater than VAL. */
5031 if (tree_int_cst_lt (val, n->low))
5032 n = n->left;
5034 /* or distinctly less than VAL. */
5035 else if (tree_int_cst_lt (n->high, val))
5036 n = n->right;
5038 else
5040 /* We have found a matching range. */
5041 BITARRAY_SET (cases_seen, xlo);
5042 break;
5045 while (n);
5048 else
5050 if (root->left)
5051 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5052 for (n = root; n; n = n->right)
5054 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5055 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5056 while ( ! tree_int_cst_lt (n->high, val))
5058 /* Calculate (into xlo) the "offset" of the integer (val).
5059 The element with lowest value has offset 0, the next smallest
5060 element has offset 1, etc. */
5062 HOST_WIDE_INT xlo, xhi;
5063 tree t;
5064 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5066 /* The TYPE_VALUES will be in increasing order, so
5067 start searching where we last ended. */
5068 t = next_node_to_try;
5069 xlo = next_node_offset;
5070 xhi = 0;
5071 for (;;)
5073 if (t == NULL_TREE)
5075 t = TYPE_VALUES (type);
5076 xlo = 0;
5078 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5080 next_node_to_try = TREE_CHAIN (t);
5081 next_node_offset = xlo + 1;
5082 break;
5084 xlo++;
5085 t = TREE_CHAIN (t);
5086 if (t == next_node_to_try)
5088 xlo = -1;
5089 break;
5093 else
5095 t = TYPE_MIN_VALUE (type);
5096 if (t)
5097 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5098 &xlo, &xhi);
5099 else
5100 xlo = xhi = 0;
5101 add_double (xlo, xhi,
5102 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5103 &xlo, &xhi);
5106 if (xhi == 0 && xlo >= 0 && xlo < count)
5107 BITARRAY_SET (cases_seen, xlo);
5108 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5109 1, 0,
5110 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5116 /* Called when the index of a switch statement is an enumerated type
5117 and there is no default label.
5119 Checks that all enumeration literals are covered by the case
5120 expressions of a switch. Also, warn if there are any extra
5121 switch cases that are *not* elements of the enumerated type.
5123 If all enumeration literals were covered by the case expressions,
5124 turn one of the expressions into the default expression since it should
5125 not be possible to fall through such a switch. */
5127 void
5128 check_for_full_enumeration_handling (type)
5129 tree type;
5131 register struct case_node *n;
5132 register struct case_node **l;
5133 register tree chain;
5134 int all_values = 1;
5136 /* True iff the selector type is a numbered set mode. */
5137 int sparseness = 0;
5139 /* The number of possible selector values. */
5140 HOST_WIDE_INT size;
5142 /* For each possible selector value, a one iff it has been matched
5143 by a case value alternative. */
5144 unsigned char *cases_seen;
5146 /* The allocated size of cases_seen, in chars. */
5147 long bytes_needed;
5148 tree t;
5150 if (output_bytecode)
5152 bc_check_for_full_enumeration_handling (type);
5153 return;
5156 if (! warn_switch)
5157 return;
5159 size = all_cases_count (type, &sparseness);
5160 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5162 if (size > 0 && size < 600000
5163 /* We deliberately use malloc here - not xmalloc. */
5164 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
5166 long i;
5167 tree v = TYPE_VALUES (type);
5168 bzero (cases_seen, bytes_needed);
5170 /* The time complexity of this code is normally O(N), where
5171 N is the number of members in the enumerated type.
5172 However, if type is an ENUMERAL_TYPE whose values do not
5173 increase monotonically, O(N*log(N)) time may be needed. */
5175 mark_seen_cases (type, cases_seen, size, sparseness);
5177 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5179 if (BITARRAY_TEST(cases_seen, i) == 0)
5180 warning ("enumeration value `%s' not handled in switch",
5181 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5184 free (cases_seen);
5187 /* Now we go the other way around; we warn if there are case
5188 expressions that don't correspond to enumerators. This can
5189 occur since C and C++ don't enforce type-checking of
5190 assignments to enumeration variables. */
5192 if (case_stack->data.case_stmt.case_list
5193 && case_stack->data.case_stmt.case_list->left)
5194 case_stack->data.case_stmt.case_list
5195 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5196 if (warn_switch)
5197 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5199 for (chain = TYPE_VALUES (type);
5200 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5201 chain = TREE_CHAIN (chain))
5204 if (!chain)
5206 if (TYPE_NAME (type) == 0)
5207 warning ("case value `%d' not in enumerated type",
5208 TREE_INT_CST_LOW (n->low));
5209 else
5210 warning ("case value `%d' not in enumerated type `%s'",
5211 TREE_INT_CST_LOW (n->low),
5212 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5213 == IDENTIFIER_NODE)
5214 ? TYPE_NAME (type)
5215 : DECL_NAME (TYPE_NAME (type))));
5217 if (!tree_int_cst_equal (n->low, n->high))
5219 for (chain = TYPE_VALUES (type);
5220 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5221 chain = TREE_CHAIN (chain))
5224 if (!chain)
5226 if (TYPE_NAME (type) == 0)
5227 warning ("case value `%d' not in enumerated type",
5228 TREE_INT_CST_LOW (n->high));
5229 else
5230 warning ("case value `%d' not in enumerated type `%s'",
5231 TREE_INT_CST_LOW (n->high),
5232 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5233 == IDENTIFIER_NODE)
5234 ? TYPE_NAME (type)
5235 : DECL_NAME (TYPE_NAME (type))));
5240 #if 0
5241 /* ??? This optimization is disabled because it causes valid programs to
5242 fail. ANSI C does not guarantee that an expression with enum type
5243 will have a value that is the same as one of the enumeration literals. */
5245 /* If all values were found as case labels, make one of them the default
5246 label. Thus, this switch will never fall through. We arbitrarily pick
5247 the last one to make the default since this is likely the most
5248 efficient choice. */
5250 if (all_values)
5252 for (l = &case_stack->data.case_stmt.case_list;
5253 (*l)->right != 0;
5254 l = &(*l)->right)
5257 case_stack->data.case_stmt.default_label = (*l)->code_label;
5258 *l = 0;
5260 #endif /* 0 */
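/* Illustrative example, not part of the original source: with
   -Wswitch, a hypothetical switch over `enum color' draws both
   kinds of warning from the code above.  */
#if 0
  enum color { RED, GREEN, BLUE } c;
  switch (c)              /* No default label.                      */
    {
    case RED:   break;
    case GREEN: break;    /* BLUE unhandled: first warning.         */
    case 42:    break;    /* Not an enumerator: second warning.     */
    }
#endif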
5264 /* Check that all enumeration literals are covered by the case
5265 expressions of a switch. Also warn if there are any cases
5266 that are not elements of the enumerated type. */
5268 static void
5269 bc_check_for_full_enumeration_handling (type)
5270 tree type;
5272 struct nesting *thiscase = case_stack;
5273 struct case_node *c;
5274 tree e;
5276 /* Check for enums not handled. */
5277 for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
5279 for (c = thiscase->data.case_stmt.case_list->left;
5280 c && tree_int_cst_lt (c->high, TREE_VALUE (e));
5281 c = c->left)
5283 if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
5284 warning ("enumerated value `%s' not handled in switch",
5285 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
5288 /* Check for cases not in the enumeration. */
5289 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5291 for (e = TYPE_VALUES (type);
5292 e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
5293 e = TREE_CHAIN (e))
5295 if (! e)
5296 warning ("case value `%d' not in enumerated type `%s'",
5297 TREE_INT_CST_LOW (c->low),
5298 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
5299 ? TYPE_NAME (type)
5300 : DECL_NAME (TYPE_NAME (type))));
5304 /* Terminate a case (Pascal) or switch (C) statement
5305 in which ORIG_INDEX is the expression to be tested.
5306 Generate the code to test it and jump to the right place. */
5308 void
5309 expand_end_case (orig_index)
5310 tree orig_index;
5312 tree minval, maxval, range, orig_minval;
5313 rtx default_label = 0;
5314 register struct case_node *n;
5315 int count;
5316 rtx index;
5317 rtx table_label;
5318 int ncases;
5319 rtx *labelvec;
5320 register int i;
5321 rtx before_case;
5322 register struct nesting *thiscase = case_stack;
5323 tree index_expr, index_type;
5324 int unsignedp;
5326 if (output_bytecode)
5328 bc_expand_end_case (orig_index);
5329 return;
5332 table_label = gen_label_rtx ();
5333 index_expr = thiscase->data.case_stmt.index_expr;
5334 index_type = TREE_TYPE (index_expr);
5335 unsignedp = TREE_UNSIGNED (index_type);
5337 do_pending_stack_adjust ();
5339 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5340 if (index_type != error_mark_node)
5342 /* If switch expression was an enumerated type, check that all
5343 enumeration literals are covered by the cases.
5344 No sense trying this if there's a default case, however. */
5346 if (!thiscase->data.case_stmt.default_label
5347 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5348 && TREE_CODE (index_expr) != INTEGER_CST)
5349 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5351 /* If this is the first label, warn if any insns have been emitted. */
5352 if (thiscase->data.case_stmt.seenlabel == 0)
5354 rtx insn;
5355 for (insn = get_last_insn ();
5356 insn != case_stack->data.case_stmt.start;
5357 insn = PREV_INSN (insn))
5358 if (GET_CODE (insn) != NOTE
5359 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
5361 warning ("unreachable code at beginning of %s",
5362 case_stack->data.case_stmt.printname);
5363 break;
5367 /* If we don't have a default-label, create one here,
5368 after the body of the switch. */
5369 if (thiscase->data.case_stmt.default_label == 0)
5371 thiscase->data.case_stmt.default_label
5372 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5373 expand_label (thiscase->data.case_stmt.default_label);
5375 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5377 before_case = get_last_insn ();
5379 if (thiscase->data.case_stmt.case_list
5380 && thiscase->data.case_stmt.case_list->left)
5381 thiscase->data.case_stmt.case_list
5382 = case_tree2list(thiscase->data.case_stmt.case_list, 0);
5384 /* Simplify the case-list before we count it. */
5385 group_case_nodes (thiscase->data.case_stmt.case_list);
5387 /* Get upper and lower bounds of case values.
5388 Also convert all the case values to the index expr's data type. */
5390 count = 0;
5391 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5393 /* Check low and high label values are integers. */
5394 if (TREE_CODE (n->low) != INTEGER_CST)
5395 abort ();
5396 if (TREE_CODE (n->high) != INTEGER_CST)
5397 abort ();
5399 n->low = convert (index_type, n->low);
5400 n->high = convert (index_type, n->high);
5402 /* Count the elements and track the largest and smallest
5403 of them (treating them as signed even if they are not). */
5404 if (count++ == 0)
5406 minval = n->low;
5407 maxval = n->high;
5409 else
5411 if (INT_CST_LT (n->low, minval))
5412 minval = n->low;
5413 if (INT_CST_LT (maxval, n->high))
5414 maxval = n->high;
5416 /* A range counts double, since it requires two compares. */
5417 if (! tree_int_cst_equal (n->low, n->high))
5418 count++;
5421 orig_minval = minval;
5423 /* Compute span of values. */
5424 if (count != 0)
5425 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5427 end_cleanup_deferal ();
5429 if (count == 0)
5431 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5432 emit_queue ();
5433 emit_jump (default_label);
5436 /* If range of values is much bigger than number of values,
5437 make a sequence of conditional branches instead of a dispatch.
5438 If the switch-index is a constant, do it this way
5439 because we can optimize it. */
5441 #ifndef CASE_VALUES_THRESHOLD
5442 #ifdef HAVE_casesi
5443 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5444 #else
5445 /* If the machine does not have a case insn that compares the
5446 bounds, this means extra overhead for dispatch tables,
5447 which raises the threshold for using them. */
5448 #define CASE_VALUES_THRESHOLD 5
5449 #endif /* HAVE_casesi */
5450 #endif /* CASE_VALUES_THRESHOLD */
5452 else if (TREE_INT_CST_HIGH (range) != 0
5453 || count < CASE_VALUES_THRESHOLD
5454 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
5455 > 10 * count)
5456 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5457 || flag_pic
5458 #endif
5459 || TREE_CODE (index_expr) == INTEGER_CST
5460 /* These will reduce to a constant. */
5461 || (TREE_CODE (index_expr) == CALL_EXPR
5462 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5463 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5464 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5465 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5466 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5468 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5470 /* If the index is a short or char for which we do not have
5471 an insn to handle comparisons directly, convert it to
5472 a full integer now, rather than letting each comparison
5473 generate the conversion. */
5475 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5476 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
5477 == CODE_FOR_nothing))
5479 enum machine_mode wider_mode;
5480 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5481 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5482 if (cmp_optab->handlers[(int) wider_mode].insn_code
5483 != CODE_FOR_nothing)
5485 index = convert_to_mode (wider_mode, index, unsignedp);
5486 break;
5490 emit_queue ();
5491 do_pending_stack_adjust ();
5493 index = protect_from_queue (index, 0);
5494 if (GET_CODE (index) == MEM)
5495 index = copy_to_reg (index);
5496 if (GET_CODE (index) == CONST_INT
5497 || TREE_CODE (index_expr) == INTEGER_CST)
5499 /* Make a tree node with the proper constant value
5500 if we don't already have one. */
5501 if (TREE_CODE (index_expr) != INTEGER_CST)
5503 index_expr
5504 = build_int_2 (INTVAL (index),
5505 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5506 index_expr = convert (index_type, index_expr);
5509 /* For constant index expressions we need only
5510 issue an unconditional branch to the appropriate
5511 target code. The job of removing any unreachable
5512 code is left to the optimization phase if the
5513 "-O" option is specified. */
5514 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5515 if (! tree_int_cst_lt (index_expr, n->low)
5516 && ! tree_int_cst_lt (n->high, index_expr))
5517 break;
5519 if (n)
5520 emit_jump (label_rtx (n->code_label));
5521 else
5522 emit_jump (default_label);
5524 else
5526 /* If the index expression is not constant we generate
5527 a binary decision tree to select the appropriate
5528 target code. This is done as follows:
5530 The list of cases is rearranged into a binary tree,
5531 nearly optimal assuming equal probability for each case.
5533 The tree is transformed into RTL, eliminating
5534 redundant test conditions at the same time.
5536 If program flow could reach the end of the
5537 decision tree, an unconditional jump to the
5538 default code is emitted. */
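/* Editorial sketch of the strategy just described, using the
   hypothetical case values 1, 5 and 9: balance_case_nodes roots
   the tree at 5, and emit_case_nodes then emits RTL behaving like

	if (index == 5) goto L5;
	if (index > 5) goto test_right;
	if (index == 1) goto L1;
	goto default;
     test_right:
	if (index == 9) goto L9;
	goto default;

   with emit_jump_if_reachable supplying a jump to the default label
   only where control can actually fall through.  */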
5540 use_cost_table
5541 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5542 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5543 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5544 NULL_PTR);
5545 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5546 default_label, index_type);
5547 emit_jump_if_reachable (default_label);
5550 else
5552 int win = 0;
5553 #ifdef HAVE_casesi
5554 if (HAVE_casesi)
5556 enum machine_mode index_mode = SImode;
5557 int index_bits = GET_MODE_BITSIZE (index_mode);
5558 rtx op1, op2;
5559 enum machine_mode op_mode;
5561 /* Convert the index to SImode. */
5562 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5563 > GET_MODE_BITSIZE (index_mode))
5565 enum machine_mode omode = TYPE_MODE (index_type);
5566 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5568 /* We must handle the endpoints in the original mode. */
5569 index_expr = build (MINUS_EXPR, index_type,
5570 index_expr, minval);
5571 minval = integer_zero_node;
5572 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5573 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
5574 emit_jump_insn (gen_bltu (default_label));
5575 /* Now we can safely truncate. */
5576 index = convert_to_mode (index_mode, index, 0);
5578 else
5580 if (TYPE_MODE (index_type) != index_mode)
5582 index_expr = convert (type_for_size (index_bits, 0),
5583 index_expr);
5584 index_type = TREE_TYPE (index_expr);
5587 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5589 emit_queue ();
5590 index = protect_from_queue (index, 0);
5591 do_pending_stack_adjust ();
5593 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
5594 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
5595 (index, op_mode))
5596 index = copy_to_mode_reg (op_mode, index);
5598 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5600 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
5601 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
5602 (op1, op_mode))
5603 op1 = copy_to_mode_reg (op_mode, op1);
5605 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5607 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
5608 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
5609 (op2, op_mode))
5610 op2 = copy_to_mode_reg (op_mode, op2);
5612 emit_jump_insn (gen_casesi (index, op1, op2,
5613 table_label, default_label));
5614 win = 1;
5616 #endif
5617 #ifdef HAVE_tablejump
5618 if (! win && HAVE_tablejump)
5620 index_expr = convert (thiscase->data.case_stmt.nominal_type,
5621 fold (build (MINUS_EXPR, index_type,
5622 index_expr, minval)));
5623 index_type = TREE_TYPE (index_expr);
5624 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5625 emit_queue ();
5626 index = protect_from_queue (index, 0);
5627 do_pending_stack_adjust ();
5629 do_tablejump (index, TYPE_MODE (index_type),
5630 expand_expr (range, NULL_RTX, VOIDmode, 0),
5631 table_label, default_label);
5632 win = 1;
5634 #endif
5635 if (! win)
5636 abort ();
5638 /* Get table of labels to jump to, in order of case index. */
5640 ncases = TREE_INT_CST_LOW (range) + 1;
5641 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5642 bzero ((char *) labelvec, ncases * sizeof (rtx));
5644 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5646 register HOST_WIDE_INT i
5647 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5649 while (1)
5651 labelvec[i]
5652 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
5653 if (i + TREE_INT_CST_LOW (orig_minval)
5654 == TREE_INT_CST_LOW (n->high))
5655 break;
5656 i++;
5660 /* Fill in the gaps with the default. */
5661 for (i = 0; i < ncases; i++)
5662 if (labelvec[i] == 0)
5663 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
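/* Editorial example: for case values 2, 3 and 5 (so orig_minval == 2
   and ncases == 4), the two loops above leave

	labelvec[0] = L2;  labelvec[1] = L3;
	labelvec[2] = Ldefault;  labelvec[3] = L5;

   i.e. the vector is indexed by (case value - orig_minval), with any
   holes filled by the default label.  */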
5665 /* Output the table. */
5666 emit_label (table_label);
5668 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
5669 were an expression, instead of an #ifdef/#ifndef. */
5670 if (
5671 #ifdef CASE_VECTOR_PC_RELATIVE
5672 1 ||
5673 #endif
5674 flag_pic)
5675 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
5676 gen_rtx (LABEL_REF, Pmode, table_label),
5677 gen_rtvec_v (ncases, labelvec)));
5678 else
5679 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
5680 gen_rtvec_v (ncases, labelvec)));
5682 /* If the case insn drops through the table,
5683 after the table we must jump to the default-label.
5684 Otherwise record no drop-through after the table. */
5685 #ifdef CASE_DROPS_THROUGH
5686 emit_jump (default_label);
5687 #else
5688 emit_barrier ();
5689 #endif
5692 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5693 reorder_insns (before_case, get_last_insn (),
5694 thiscase->data.case_stmt.start);
5696 else
5697 end_cleanup_deferal ();
5699 if (thiscase->exit_label)
5700 emit_label (thiscase->exit_label);
5702 POPSTACK (case_stack);
5704 free_temp_slots ();
5707 /* Convert the tree NODE into a list linked by the right field, with the left
5708 field zeroed. RIGHT is used for recursion; it is a list to be placed
5709 rightmost in the resulting list. */
5711 static struct case_node *
5712 case_tree2list (node, right)
5713 struct case_node *node, *right;
5715 struct case_node *left;
5717 if (node->right)
5718 right = case_tree2list (node->right, right);
5720 node->right = right;
5721 if (left = node->left)
5723 node->left = 0;
5724 return case_tree2list (left, node);
5727 return node;
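/* Editorial example of the transformation performed above: the tree

	  2
	 / \		becomes the right-linked list  1 -> 2 -> 3
	1   3

   with every `left' field zeroed by the recursive calls.  */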
5730 /* Terminate a case statement. EXPR is the original index
5731 expression. */
5733 static void
5734 bc_expand_end_case (expr)
5735 tree expr;
5737 struct nesting *thiscase = case_stack;
5738 enum bytecode_opcode opcode;
5739 struct bc_label *jump_label;
5740 struct case_node *c;
5742 bc_emit_bytecode (jump);
5743 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
5745 #ifdef DEBUG_PRINT_CODE
5746 fputc ('\n', stderr);
5747 #endif
5749 /* Now that the size of the jump table is known, emit the actual
5750 indexed jump instruction. */
5751 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
5753 opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
5754 ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
5755 : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;
5757 bc_emit_bytecode (opcode);
5759 /* Now emit the case instruction's literal arguments, in order.
5760 In addition to the value on the stack, it uses:
5761 1. The address of the jump table.
5762 2. The size of the jump table.
5763 3. The default label. (See the sketch below.) */
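/* Editorial sketch of the operand stream emitted below (the exact
   byte encoding is whatever bc_emit_bytecode_const produces):

	caseSI/caseSU/caseDI/caseDU
	<labelref: jump table>  <num_ranges>  <labelref: default or exit>
	...
	jump table:  { low, high, labelref }  for each case range  */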
5765 jump_label = bc_get_bytecode_label ();
5766 bc_emit_bytecode_labelref (jump_label);
5767 bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
5768 sizeof thiscase->data.case_stmt.num_ranges);
5770 if (thiscase->data.case_stmt.default_label)
5771 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)));
5772 else
5773 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
5775 /* Output the jump table. */
5777 bc_align_bytecode (3 /* PTR_ALIGN */);
5778 bc_emit_bytecode_labeldef (jump_label);
5780 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
5781 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5783 opcode = TREE_INT_CST_LOW (c->low);
5784 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5786 opcode = TREE_INT_CST_LOW (c->high);
5787 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5789 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
5791 else
5792 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
5793 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5795 bc_emit_bytecode_DI_const (c->low);
5796 bc_emit_bytecode_DI_const (c->high);
5798 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
5800 else
5801 /* Bad mode */
5802 abort ();
5805 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));
5807 /* Possibly issue enumeration warnings. */
5809 if (!thiscase->data.case_stmt.default_label
5810 && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
5811 && TREE_CODE (expr) != INTEGER_CST
5812 && warn_switch)
5813 check_for_full_enumeration_handling (TREE_TYPE (expr));
5816 #ifdef DEBUG_PRINT_CODE
5817 fputc ('\n', stderr);
5818 #endif
5820 POPSTACK (case_stack);
5824 /* Return a unique bytecode ID. */
5826 int
5827 bc_new_uid ()
5829 static int bc_uid = 0;
5831 return (++bc_uid);
5834 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5836 static void
5837 do_jump_if_equal (op1, op2, label, unsignedp)
5838 rtx op1, op2, label;
5839 int unsignedp;
5841 if (GET_CODE (op1) == CONST_INT
5842 && GET_CODE (op2) == CONST_INT)
5844 if (INTVAL (op1) == INTVAL (op2))
5845 emit_jump (label);
5847 else
5849 enum machine_mode mode = GET_MODE (op1);
5850 if (mode == VOIDmode)
5851 mode = GET_MODE (op2);
5852 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
5853 emit_jump_insn (gen_beq (label));
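/* Editorial note on the helper above: when both operands are
   CONST_INT the branch is resolved at compile time, e.g.

	do_jump_if_equal (GEN_INT (3), GEN_INT (3), label, 0);

   emits an unconditional jump, while unequal constants emit no code
   at all; otherwise a compare-and-branch pair is emitted.  */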
5857 /* Not all case values are encountered equally often. This function
5858 uses a heuristic to weight case labels, in cases where that
5859 looks like a reasonable thing to do.
5861 Right now, all we try to guess is text, and we establish the
5862 following weights:
5864 chars above space: 16
5865 digits: 16
5866 default: 12
5867 space, punct: 8
5868 tab: 4
5869 newline: 2
5870 other "\" chars: 1
5871 remaining chars: 0
5873 If we find any cases in the switch that are not either -1 or in the range
5874 of valid ASCII characters, or are control characters other than those
5875 commonly used with "\", don't treat this switch as scanning text.
5877 Return 1 if these nodes are suitable for cost estimation, otherwise
5878 return 0. */
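/* Editorial example: for a (hypothetical) switch over characters with
   labels 'a', '0', ' ' and '\n', the table built below assigns the
   weights 16, 16, 8 and 2 respectively, so balance_case_nodes tends
   to place the higher-weighted compares nearer the root of the
   decision tree.  */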
5880 static int
5881 estimate_case_costs (node)
5882 case_node_ptr node;
5884 tree min_ascii = build_int_2 (-1, -1);
5885 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5886 case_node_ptr n;
5887 int i;
5889 /* If we haven't already made the cost table, make it now. Note that the
5890 lower bound of the table is -1, not zero. */
5892 if (cost_table == NULL)
5894 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5895 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5897 for (i = 0; i < 128; i++)
5899 if (isalnum (i))
5900 cost_table[i] = 16;
5901 else if (ispunct (i))
5902 cost_table[i] = 8;
5903 else if (iscntrl (i))
5904 cost_table[i] = -1;
5907 cost_table[' '] = 8;
5908 cost_table['\t'] = 4;
5909 cost_table['\0'] = 4;
5910 cost_table['\n'] = 2;
5911 cost_table['\f'] = 1;
5912 cost_table['\v'] = 1;
5913 cost_table['\b'] = 1;
5916 /* See if all the case expressions look like text. It is text if the
5917 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5918 as signed arithmetic since we don't want to ever access cost_table with a
5919 value less than -1. Also check that none of the constants in a range
5920 are strange control characters. */
5922 for (n = node; n; n = n->right)
5924 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5925 return 0;
5927 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5928 if (cost_table[i] < 0)
5929 return 0;
5932 /* All interesting values are within the range of interesting
5933 ASCII characters. */
5934 return 1;
5937 /* Scan an ordered list of case nodes
5938 combining those with consecutive values or ranges.
5940 E.g., three separate entries 1: 2: 3: become one entry 1..3: */
5942 static void
5943 group_case_nodes (head)
5944 case_node_ptr head;
5946 case_node_ptr node = head;
5948 while (node)
5950 rtx lb = next_real_insn (label_rtx (node->code_label));
5951 rtx lb2;
5952 case_node_ptr np = node;
5954 /* Try to group the successors of NODE with NODE. */
5955 while (((np = np->right) != 0)
5956 /* Do they jump to the same place? */
5957 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5958 || (lb != 0 && lb2 != 0
5959 && simplejump_p (lb)
5960 && simplejump_p (lb2)
5961 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5962 SET_SRC (PATTERN (lb2)))))
5963 /* Are their ranges consecutive? */
5964 && tree_int_cst_equal (np->low,
5965 fold (build (PLUS_EXPR,
5966 TREE_TYPE (node->high),
5967 node->high,
5968 integer_one_node)))
5969 /* An overflow is not consecutive. */
5970 && tree_int_cst_lt (node->high,
5971 fold (build (PLUS_EXPR,
5972 TREE_TYPE (node->high),
5973 node->high,
5974 integer_one_node))))
5976 node->high = np->high;
5978 /* NP is the first node after NODE which can't be grouped with it.
5979 Delete the nodes in between, and move on to that node. */
5980 node->right = np;
5981 node = np;
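/* Editorial example: given

	case 1: case 2: case 3: do_something (); break;

   (do_something hypothetical) all three nodes jump to the same place
   and have consecutive values, so the loop above merges them into a
   single node covering 1..3, which later costs one range test instead
   of three compares.  */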
5985 /* Take an ordered list of case nodes
5986 and transform them into a near optimal binary tree,
5987 on the assumption that any target code selection value is as
5988 likely as any other.
5990 The transformation is performed by splitting the ordered
5991 list into two equal sections plus a pivot. The parts are
5992 then attached to the pivot as left and right branches. Each
5993 branch is then transformed recursively. */
5995 static void
5996 balance_case_nodes (head, parent)
5997 case_node_ptr *head;
5998 case_node_ptr parent;
6000 register case_node_ptr np;
6002 np = *head;
6003 if (np)
6005 int cost = 0;
6006 int i = 0;
6007 int ranges = 0;
6008 register case_node_ptr *npp;
6009 case_node_ptr left;
6011 /* Count the number of entries on this branch. Also count the ranges. */
6013 while (np)
6015 if (!tree_int_cst_equal (np->low, np->high))
6017 ranges++;
6018 if (use_cost_table)
6019 cost += cost_table[TREE_INT_CST_LOW (np->high)];
6022 if (use_cost_table)
6023 cost += cost_table[TREE_INT_CST_LOW (np->low)];
6025 i++;
6026 np = np->right;
6029 if (i > 2)
6031 /* Split this list if it is long enough for that to help. */
6032 npp = head;
6033 left = *npp;
6034 if (use_cost_table)
6036 /* Find the place in the list that bisects the list's total cost;
6037 here I gets half the total cost. */
6038 int n_moved = 0;
6039 i = (cost + 1) / 2;
6040 while (1)
6042 /* Skip nodes while their cost does not reach that amount. */
6043 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
6044 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
6045 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
6046 if (i <= 0)
6047 break;
6048 npp = &(*npp)->right;
6049 n_moved += 1;
6051 if (n_moved == 0)
6053 /* Leave this branch lopsided, but optimize left-hand
6054 side and fill in `parent' fields for right-hand side. */
6055 np = *head;
6056 np->parent = parent;
6057 balance_case_nodes (&np->left, np);
6058 for (; np->right; np = np->right)
6059 np->right->parent = np;
6060 return;
6063 /* If there are just three nodes, split at the middle one. */
6064 else if (i == 3)
6065 npp = &(*npp)->right;
6066 else
6068 /* Find the place in the list that bisects the list's total cost,
6069 where ranges count as 2.
6070 Here I gets half the total cost. */
6071 i = (i + ranges + 1) / 2;
6072 while (1)
6074 /* Skip nodes while their cost does not reach that amount. */
6075 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
6076 i--;
6077 i--;
6078 if (i <= 0)
6079 break;
6080 npp = &(*npp)->right;
6083 *head = np = *npp;
6084 *npp = 0;
6085 np->parent = parent;
6086 np->left = left;
6088 /* Optimize each of the two split parts. */
6089 balance_case_nodes (&np->left, np);
6090 balance_case_nodes (&np->right, np);
6092 else
6094 /* Else leave this branch as one level,
6095 but fill in `parent' fields. */
6096 np = *head;
6097 np->parent = parent;
6098 for (; np->right; np = np->right)
6099 np->right->parent = np;
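/* Editorial illustration, assuming equal weights: the ordered list
   1 2 3 4 5 6 7 is split at its midpoint, yielding roughly

		 4
	       /   \
	      2     6
	     / \   / \
	    1   3 5   7

   so any of the seven cases is selected after at most three tests.  */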
6104 /* Search the parent sections of the case node tree
6105 to see if a test for the lower bound of NODE would be redundant.
6106 INDEX_TYPE is the type of the index expression.
6108 The instructions to generate the case decision tree are
6109 output in the same order as nodes are processed, so it is
6110 known that if a parent node has already tested the value one
6111 below the current node's low bound, the current node is bounded
6112 at its lower end and the test would be redundant. */
6114 static int
6115 node_has_low_bound (node, index_type)
6116 case_node_ptr node;
6117 tree index_type;
6119 tree low_minus_one;
6120 case_node_ptr pnode;
6122 /* If the lower bound of this node is the lowest value in the index type,
6123 we need not test it. */
6125 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
6126 return 1;
6128 /* If this node has a left branch, the value at the left must be less
6129 than that at this node, so it cannot be bounded at the bottom and
6130 we need not bother testing any further. */
6132 if (node->left)
6133 return 0;
6135 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
6136 node->low, integer_one_node));
6138 /* If the subtraction above overflowed, we can't verify anything.
6139 Otherwise, look for a parent that tests our value - 1. */
6141 if (! tree_int_cst_lt (low_minus_one, node->low))
6142 return 0;
6144 for (pnode = node->parent; pnode; pnode = pnode->parent)
6145 if (tree_int_cst_equal (low_minus_one, pnode->high))
6146 return 1;
6148 return 0;
6151 /* Search the parent sections of the case node tree
6152 to see if a test for the upper bound of NODE would be redundant.
6153 INDEX_TYPE is the type of the index expression.
6155 The instructions to generate the case decision tree are
6156 output in the same order as nodes are processed, so it is
6157 known that if a parent node has already tested the value one
6158 above the current node's high bound, the current node is bounded
6159 at its upper end and the test would be redundant. */
6161 static int
6162 node_has_high_bound (node, index_type)
6163 case_node_ptr node;
6164 tree index_type;
6166 tree high_plus_one;
6167 case_node_ptr pnode;
6169 /* If the upper bound of this node is the highest value in the type
6170 of the index expression, we need not test against it. */
6172 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
6173 return 1;
6175 /* If this node has a right branch, the value at the right must be greater
6176 than that at this node, so it cannot be bounded at the top and
6177 we need not bother testing any further. */
6179 if (node->right)
6180 return 0;
6182 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
6183 node->high, integer_one_node));
6185 /* If the addition above overflowed, we can't verify anything.
6186 Otherwise, look for a parent that tests our value + 1. */
6188 if (! tree_int_cst_lt (node->high, high_plus_one))
6189 return 0;
6191 for (pnode = node->parent; pnode; pnode = pnode->parent)
6192 if (tree_int_cst_equal (high_plus_one, pnode->low))
6193 return 1;
6195 return 0;
6198 /* Search the parent sections of the
6199 case node tree to see if both tests for the upper and lower
6200 bounds of NODE would be redundant. */
6202 static int
6203 node_is_bounded (node, index_type)
6204 case_node_ptr node;
6205 tree index_type;
6207 return (node_has_low_bound (node, index_type)
6208 && node_has_high_bound (node, index_type));
6211 /* Emit an unconditional jump to LABEL unless it would be dead code. */
6213 static void
6214 emit_jump_if_reachable (label)
6215 rtx label;
6217 if (GET_CODE (get_last_insn ()) != BARRIER)
6218 emit_jump (label);
6221 /* Emit step-by-step code to select a case for the value of INDEX.
6222 The thus generated decision tree follows the form of the
6223 case-node binary tree NODE, whose nodes represent test conditions.
6224 INDEX_TYPE is the type of the index of the switch.
6226 Care is taken to prune redundant tests from the decision tree
6227 by detecting any boundary conditions already checked by
6228 emitted rtx. (See node_has_high_bound, node_has_low_bound
6229 and node_is_bounded, above.)
6231 Where the test conditions can be shown to be redundant we emit
6232 an unconditional jump to the target code. As a further
6233 optimization, the subordinates of a tree node are examined to
6234 check for bounded nodes. In this case conditional and/or
6235 unconditional jumps as a result of the boundary check for the
6236 current node are arranged to target the subordinate's associated
6237 code for out-of-bound conditions on the current node.
6239 We can assume that when control reaches the code generated here,
6240 the index value has already been compared with the parents
6241 of this node, and determined to be on the same side of each parent
6242 as this node is. Thus, if this node tests for the value 51,
6243 and a parent tested for 52, we don't need to consider
6244 the possibility of a value greater than 51. If another parent
6245 tests for the value 50, then this node need not test anything. */
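/* Editorial illustration of the pruning described above: for a node
   testing the value 51, if one ancestor's range ends at 50 and
   another's begins at 52, node_is_bounded holds and the first case
   below emits nothing but an unconditional jump to the case label.  */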
6247 static void
6248 emit_case_nodes (index, node, default_label, index_type)
6249 rtx index;
6250 case_node_ptr node;
6251 rtx default_label;
6252 tree index_type;
6254 /* If INDEX has an unsigned type, we must make unsigned branches. */
6255 int unsignedp = TREE_UNSIGNED (index_type);
6256 typedef rtx rtx_function ();
6257 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
6258 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
6259 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
6260 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
6261 enum machine_mode mode = GET_MODE (index);
6263 /* See if our parents have already tested everything for us.
6264 If they have, emit an unconditional jump for this node. */
6265 if (node_is_bounded (node, index_type))
6266 emit_jump (label_rtx (node->code_label));
6268 else if (tree_int_cst_equal (node->low, node->high))
6270 /* Node is single valued. First see if the index expression matches
6271 this node and then check our children, if any. */
6273 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
6274 label_rtx (node->code_label), unsignedp);
6276 if (node->right != 0 && node->left != 0)
6278 /* This node has children on both sides.
6279 Dispatch to one side or the other
6280 by comparing the index value with this node's value.
6281 If one subtree is bounded, check that one first,
6282 so we can avoid real branches in the tree. */
6284 if (node_is_bounded (node->right, index_type))
6286 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6287 VOIDmode, 0),
6288 GT, NULL_RTX, mode, unsignedp, 0);
6290 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
6291 emit_case_nodes (index, node->left, default_label, index_type);
6294 else if (node_is_bounded (node->left, index_type))
6296 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6297 VOIDmode, 0),
6298 LT, NULL_RTX, mode, unsignedp, 0);
6299 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
6300 emit_case_nodes (index, node->right, default_label, index_type);
6303 else
6305 /* Neither node is bounded. First distinguish the two sides;
6306 then emit the code for one side at a time. */
6308 tree test_label
6309 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6311 /* See if the value is on the right. */
6312 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6313 VOIDmode, 0),
6314 GT, NULL_RTX, mode, unsignedp, 0);
6315 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
6317 /* Value must be on the left.
6318 Handle the left-hand subtree. */
6319 emit_case_nodes (index, node->left, default_label, index_type);
6320 /* If left-hand subtree does nothing,
6321 go to default. */
6322 emit_jump_if_reachable (default_label);
6324 /* Code branches here for the right-hand subtree. */
6325 expand_label (test_label);
6326 emit_case_nodes (index, node->right, default_label, index_type);
6330 else if (node->right != 0 && node->left == 0)
6332 /* Here we have a right child but no left, so we issue a conditional
6333 branch to default and process the right child.
6335 Omit the conditional branch to default if it would avoid only one
6336 right child; it costs too much space to save so little time. */
6338 if (node->right->right || node->right->left
6339 || !tree_int_cst_equal (node->right->low, node->right->high))
6341 if (!node_has_low_bound (node, index_type))
6343 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6344 VOIDmode, 0),
6345 LT, NULL_RTX, mode, unsignedp, 0);
6346 emit_jump_insn ((*gen_blt_pat) (default_label));
6349 emit_case_nodes (index, node->right, default_label, index_type);
6351 else
6352 /* We cannot process node->right normally
6353 since we haven't ruled out the numbers less than
6354 this node's value. So handle node->right explicitly. */
6355 do_jump_if_equal (index,
6356 expand_expr (node->right->low, NULL_RTX,
6357 VOIDmode, 0),
6358 label_rtx (node->right->code_label), unsignedp);
6361 else if (node->right == 0 && node->left != 0)
6363 /* Just one subtree, on the left. */
6365 #if 0 /* The following code and comment were formerly part
6366 of the condition here, but they didn't work
6367 and I don't understand what the idea was. -- rms. */
6368 /* If our "most probable entry" is less probable
6369 than the default label, emit a jump to
6370 the default label using condition codes
6371 already lying around. With no right branch,
6372 a branch-greater-than will get us to the default
6373 label correctly. */
6374 if (use_cost_table
6375 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
6377 #endif /* 0 */
6378 if (node->left->left || node->left->right
6379 || !tree_int_cst_equal (node->left->low, node->left->high))
6381 if (!node_has_high_bound (node, index_type))
6383 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6384 VOIDmode, 0),
6385 GT, NULL_RTX, mode, unsignedp, 0);
6386 emit_jump_insn ((*gen_bgt_pat) (default_label));
6389 emit_case_nodes (index, node->left, default_label, index_type);
6391 else
6392 /* We cannot process node->left normally
6393 since we haven't ruled out the numbers greater than
6394 this node's value. So handle node->left explicitly. */
6395 do_jump_if_equal (index,
6396 expand_expr (node->left->low, NULL_RTX,
6397 VOIDmode, 0),
6398 label_rtx (node->left->code_label), unsignedp);
6401 else
6403 /* Node is a range. These cases are very similar to those for a single
6404 value, except that we do not start by testing whether this node
6405 is the one to branch to. */
6407 if (node->right != 0 && node->left != 0)
6409 /* Node has subtrees on both sides.
6410 If the right-hand subtree is bounded,
6411 test for it first, since we can go straight there.
6412 Otherwise, we need to make a branch in the control structure,
6413 then handle the two subtrees. */
6414 tree test_label = 0;
6416 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6417 VOIDmode, 0),
6418 GT, NULL_RTX, mode, unsignedp, 0);
6420 if (node_is_bounded (node->right, index_type))
6421 /* Right hand node is fully bounded so we can eliminate any
6422 testing and branch directly to the target code. */
6423 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
6424 else
6426 /* Right hand node requires testing.
6427 Branch to a label where we will handle it later. */
6429 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6430 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
6433 /* Value belongs to this node or to the left-hand subtree. */
6435 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
6436 GE, NULL_RTX, mode, unsignedp, 0);
6437 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
6439 /* Handle the left-hand subtree. */
6440 emit_case_nodes (index, node->left, default_label, index_type);
6442 /* If right node had to be handled later, do that now. */
6444 if (test_label)
6446 /* If the left-hand subtree fell through,
6447 don't let it fall into the right-hand subtree. */
6448 emit_jump_if_reachable (default_label);
6450 expand_label (test_label);
6451 emit_case_nodes (index, node->right, default_label, index_type);
6455 else if (node->right != 0 && node->left == 0)
6457 /* Deal with values to the left of this node,
6458 if they are possible. */
6459 if (!node_has_low_bound (node, index_type))
6461 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
6462 VOIDmode, 0),
6463 LT, NULL_RTX, mode, unsignedp, 0);
6464 emit_jump_insn ((*gen_blt_pat) (default_label));
6467 /* Value belongs to this node or to the right-hand subtree. */
6469 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6470 VOIDmode, 0),
6471 LE, NULL_RTX, mode, unsignedp, 0);
6472 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
6474 emit_case_nodes (index, node->right, default_label, index_type);
6477 else if (node->right == 0 && node->left != 0)
6479 /* Deal with values to the right of this node,
6480 if they are possible. */
6481 if (!node_has_high_bound (node, index_type))
6483 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6484 VOIDmode, 0),
6485 GT, NULL_RTX, mode, unsignedp, 0);
6486 emit_jump_insn ((*gen_bgt_pat) (default_label));
6489 /* Value belongs to this node or to the left-hand subtree. */
6491 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
6492 GE, NULL_RTX, mode, unsignedp, 0);
6493 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
6495 emit_case_nodes (index, node->left, default_label, index_type);
6498 else
6500 /* Node has no children so we check low and high bounds to remove
6501 redundant tests. Only one of the bounds can exist,
6502 since otherwise this node is bounded--a case tested already. */
6504 if (!node_has_high_bound (node, index_type))
6506 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6507 VOIDmode, 0),
6508 GT, NULL_RTX, mode, unsignedp, 0);
6509 emit_jump_insn ((*gen_bgt_pat) (default_label));
6512 if (!node_has_low_bound (node, index_type))
6514 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
6515 VOIDmode, 0),
6516 LT, NULL_RTX, mode, unsignedp, 0);
6517 emit_jump_insn ((*gen_blt_pat) (default_label));
6520 emit_jump (label_rtx (node->code_label));
6525 /* These routines are used by the loop unrolling code. They copy BLOCK trees
6526 so that the debugging info will be correct for the unrolled loop. */
6528 /* Indexed by block number, contains a pointer to the N'th block node. */
6530 static tree *block_vector;
6532 void
6533 find_loop_tree_blocks ()
6535 tree block = DECL_INITIAL (current_function_decl);
6537 block_vector = identify_blocks (block, get_insns ());
6540 void
6541 unroll_block_trees ()
6543 tree block = DECL_INITIAL (current_function_decl);
6545 reorder_blocks (block_vector, block, get_insns ());