/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-6, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
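/* Illustrative sketch of that call protocol (the exact sequence is up
   to each front end): a C fragment like

	if (cond)
	  stmt;

   would be expanded by calls of roughly this shape, using the entry
   points defined in this file:

	expand_start_cond (cond, 0);	-- after parsing `(cond)'
	  ... expand `stmt' here ...
	expand_end_cond ();		-- after parsing the then-clause  */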
#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"

#include "bytecode.h"
#include "bc-typecd.h"
#include "bc-opcode.h"
#include "bc-optab.h"
#include "bc-emit.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;
/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;
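/* For illustration: in the GNU C statement-expression extension,

	x = ({ int t = f (); t + 1; });

   the value of the last expression statement (`t + 1') becomes the
   value of the whole grouping, so while inside the braces this flag is
   set and every expr-stmt's value is computed and recorded below.  */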
/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;
/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;
/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;
/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

/* Stack allocation level in which temporaries for TARGET_EXPRs live.  */
extern int target_temp_slot_level;

extern int temp_slot_level;
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
struct case_node
{
  struct case_node	*left;	/* Left son in binary tree */
  struct case_node	*right;	/* Right son in binary tree; also node chain */
  struct case_node	*parent; /* Parent of node in binary tree */
  tree			low;	/* Lowest index value for this label */
  tree			high;	/* Highest index value for this label */
  tree			code_label; /* Label to jump to when node matches */
  int			balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
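/* For illustration: given the GNU C switch

	switch (i)
	  {
	  case 1:        ...
	  case 5 ... 7:  ...
	  }

   the label `case 1' becomes a case_node with low == high == 1, and the
   range label `case 5 ... 7' becomes one with low == 5, high == 7.  */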
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  Complemented by
	     bc_stack_level (see below) when generating bytecodes.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	  /* Bytecode specific: stack level to restore stack to on exit.  */
	  int bc_stack_level;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block, if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	  /* When in a conditional context, this is the specific
	     cleanup list associated with last_unconditional_cleanup,
	     where we place the conditionalized cleanups.  */
	  tree *cleanup_ptr;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* For bytecodes, the case table is in-lined right in the code.
	     A label is needed for skipping over this block.  It is only
	     used when generating bytecodes.  */
	  rtx skip_label;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
    } data;
};
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;
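/* Illustrative sketch: for source code nested like

	while (...)	-- pushes a nesting object on loop_stack
	  {		-- pushes one on block_stack
	    if (...)	-- pushes one on cond_stack

   all three objects also sit on nesting_stack, chained through their
   `all' fields with the `if' object innermost, so POPSTACK below can
   unwind every per-construct stack in a single pass.  */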
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
   while (this != target); } while (0)
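/* Illustrative usage note: the typical call is of the form

	POPSTACK (loop_stack);

   in the matching expand_end_* function of a construct, which frees
   that construct's nesting object together with any objects nested
   more deeply (e.g. binding contours started inside a loop body).  */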
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;

  /* Bytecode specific members follow */

  /* The label that this jump is jumping to, or 0 for break, continue
     or return.  */
  struct bc_label *bc_target;

  /* The label we use for the fixup patch */
  struct bc_label *label;

  /* True (non-0) if fixup has been handled */
  int bc_handled:1;

  /* Like stack_level above, except refers to the interpreter stack */
  int bc_stack_level;
};

static struct goto_fixup *goto_fixup_chain;
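/* For illustration: a fixup is recorded for a forward goto such as

	{
	  char buf[n];	-- this contour resets the stack pointer
	  goto out;	-- `out' not yet defined: fixup recorded here
	  ...
	}
      out: ;

   The jump insn is emitted immediately; the code that restores the
   stack level is filled in later, once `out' is defined or the
   contour is exited (see expand_fixup and fixup_gotos below).  */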
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;
static void expand_goto_internal	PROTO((tree, rtx, rtx));
static void bc_expand_goto_internal	PROTO((enum bytecode_opcode,
					       struct bc_label *, tree));
static int expand_fixup			PROTO((tree, rtx, rtx));
static void bc_expand_fixup		PROTO((enum bytecode_opcode,
					       struct bc_label *, int));
static void fixup_gotos			PROTO((struct nesting *, rtx, tree,
					       rtx, int));
static void bc_fixup_gotos		PROTO((struct nesting *, int, tree,
					       rtx, int));
static void bc_expand_start_cond	PROTO((tree, int));
static void bc_expand_end_cond		PROTO((void));
static void bc_expand_start_else	PROTO((void));
static void bc_expand_end_loop		PROTO((void));
static void bc_expand_end_bindings	PROTO((tree, int, int));
static void bc_expand_decl		PROTO((tree, tree));
static void bc_expand_variable_local_init PROTO((tree));
static void bc_expand_decl_init		PROTO((tree));
static void expand_null_return_1	PROTO((rtx, int));
static void expand_value_return		PROTO((rtx));
static int tail_recursion_args		PROTO((tree, tree));
static void expand_cleanups		PROTO((tree, tree, int, int));
static void bc_expand_start_case	PROTO((struct nesting *, tree,
					       tree, char *));
static int bc_pushcase			PROTO((tree, tree));
static void bc_check_for_full_enumeration_handling PROTO((tree));
static void bc_expand_end_case		PROTO((tree));
static void do_jump_if_equal		PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs		PROTO((case_node_ptr));
static void group_case_nodes		PROTO((case_node_ptr));
static void balance_case_nodes		PROTO((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PROTO((case_node_ptr, tree));
static int node_has_high_bound		PROTO((case_node_ptr, tree));
static int node_is_bounded		PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable	PROTO((rtx));
static void emit_case_nodes		PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node		PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list PROTO((case_node *, case_node *));

extern rtx bc_allocate_local ();
extern rtx bc_allocate_variable_array ();
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
  init_eh ();
}
void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;

  init_eh_for_function ();
}
void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
  save_eh_status (p);
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
  restore_eh_status (p);
}
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  if (!output_bytecode)
    {
      last_insn = get_last_insn ();
      if (!optimize
	  && (GET_CODE (last_insn) == CODE_LABEL
	      || (GET_CODE (last_insn) == NOTE
		  && prev_real_insn (last_insn) == 0)))
	emit_insn (gen_nop ());
    }
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  if (output_bytecode)
    {
      bc_expand_expr (exp);
      bc_emit_instruction (jumpP);
    }
  else
    {
      rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
      x = convert_memory_address (Pmode, x);
#endif

      emit_queue ();
      /* Be sure the function is executable.  */
      if (flag_check_memory_usage)
	emit_library_call (chkr_check_exec_libfunc, 1,
			   VOIDmode, 1, x, ptr_mode);

      do_pending_stack_adjust ();
      emit_indirect_jump (x);
    }
}
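/* For illustration, this is what expands the GNU C computed goto:

	void *p = &&lab;
	...
	goto *p;	-- EXP is the pointer expression `p'

   where `&&lab' takes the address of a label.  */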
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  if (output_bytecode)
    {
      if (! DECL_RTL (label))
	DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
      if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
	error ("multiply defined label");
      return;
    }

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}
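/* Illustrative sketch, using GNU C nested functions: in roughly

	int f ()
	{
	  __label__ out;		-- label usable from nested fns
	  void g () { goto out; }	-- nonlocal goto from g into f
	  ...
	 out: return 1;
	}

   the front end calls declare_nonlocal_label for `out', so that the
   goto in `g' can restore f's frame and stack pointers (see the
   nonlocal case in expand_goto below) before jumping.  */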
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  if (output_bytecode)
    {
      expand_goto_internal (label, label_rtx (label), NULL_RTX);
      return;
    }

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with hard_frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx, label_ref);
	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  /* NOTICE!  If a bytecode instruction other than `jump' is needed,
     then the caller has to call bc_expand_goto_internal()
     directly.  This is rather an exceptional case, and there aren't
     that many places where this is necessary.  */
  if (output_bytecode)
    {
      bc_expand_goto_internal (jump, BYTECODE_BC_LABEL (label), body);
      return;
    }

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate a jump with OPCODE to the given bytecode LABEL which is
   found within BODY.  */

static void
bc_expand_goto_internal (opcode, label, body)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     tree body;
{
  struct nesting *block;
  int stack_level = -1;

  /* If the label is defined, adjust the stack as necessary.
     If it's not defined, we have to push the reference on the
     fixup list.  */

  if (label->defined)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing bytecode uids.)  Then restore the
	 outermost stack level within that block.  */

      for (block = block_stack; block; block = block->next)
	{
	  if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)
	    break;
	  if (block->data.block.bc_stack_level)
	    stack_level = block->data.block.bc_stack_level;

	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      /* Restore the stack level.  If we need to adjust the stack, we
	 must do so after the jump, since the jump may depend on
	 what's on the stack.  Thus, any stack-modifying conditional
	 jumps (these are the only ones that rely on what's on the
	 stack) go into the fixup list.  */

      if (stack_level >= 0
	  && stack_depth != stack_level
	  && opcode != jump)

	bc_expand_fixup (opcode, label, stack_level);
      else
	{
	  if (stack_level >= 0)
	    bc_adjust_stack (stack_depth - stack_level);

	  if (body && DECL_BIT_FIELD (body))
	    error ("jump to `%s' invalidly jumps into binding contour",
		   IDENTIFIER_POINTER (DECL_NAME (body)));

	  /* Emit immediate jump */
	  bc_emit_bytecode (opcode);
	  bc_emit_bytecode_labelref (label);

#ifdef DEBUG_PRINT_CODE
	  fputc ('\n', stderr);
#endif
	}
    }
  else
    /* Put goto in the fixup list */
    bc_expand_fixup (opcode, label, stack_level);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	start_sequence ();
	pushlevel (0);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
	end_sequence ();
	emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
   Make the fixup restore the stack level to STACK_LEVEL.  */

static void
bc_expand_fixup (opcode, label, stack_level)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     int stack_level;
{
  struct goto_fixup *fixup
    = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));

  fixup->label = bc_get_bytecode_label ();
  fixup->bc_target = label;
  fixup->bc_stack_level = stack_level;
  fixup->bc_handled = FALSE;

  fixup->next = goto_fixup_chain;
  goto_fixup_chain = fixup;

  /* Insert a jump to the fixup code */
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (fixup->label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  if (output_bytecode)
    {
      /* ??? The second arg is the bc stack level, which is not the same
	 as STACK_LEVEL.  I have no idea what should go here, so I'll
	 just pass 0.  */
      bc_fixup_gotos (thisblock, 0, cleanup_list, first_insn, dont_jump_in);
      return;
    }

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   Note: STACK_DEPTH is not altered.

   The arguments are currently not used in the bytecode compiler, but we may
   need them one day for languages other than C.

   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
static void
bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     int stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;
  int saved_stack_depth;

  /* F is the fixup we are considering; PREV is the previous one.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev)
	    prev->next = f->next;
	}

      /* Emit code to restore the stack and continue */
      bc_emit_bytecode_labeldef (f->label);

      /* Save stack_depth across call, since bc_adjust_stack will alter
	 the perceived stack depth via the instructions generated.  */

      if (f->bc_stack_level >= 0)
	{
	  saved_stack_depth = stack_depth;
	  bc_adjust_stack (stack_depth - f->bc_stack_level);
	  stack_depth = saved_stack_depth;
	}

      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (f->bc_target);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
    }

  goto_fixup_chain = NULL;
}
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (output_bytecode)
    {
      error ("`asm' is invalid when generating bytecode");
      return;
    }

  if (flag_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
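/* For illustration, this handles only the argument-less form, e.g.

	asm ("nop");

   The string is emitted verbatim as an ASM_INPUT body; forms with
   operands go through expand_asm_operands below.  */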
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;
  if (output_bytecode)
    {
      error ("`asm' is invalid when generating bytecode");
      return;
    }

  if (flag_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      tree val1;
      int j;
      int found_equal = 0;
      int found_plus = 0;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
	 if it allows any register.  Be liberal on the latter test, since
	 the worst that happens if we get it wrong is we issue an error
	 message.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
	switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
	  {
	  case '+':
	    /* Make sure we can specify the matching operand.  */
	    if (i > 9)
	      {
		error ("output operand constraint %d contains `+'", i);
		return;
	      }

	    /* Replace '+' with '='.  */
	    TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] = '=';
	    found_plus = 1;
	    break;

	  case '=':
	    found_equal = 1;
	    break;

	  case '?':  case '!':  case '*':  case '%':  case '&':
	  case 'V':  case 'm':  case 'o':  case '<':  case '>':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    error ("matching constraint not valid in output operand");
	    break;

	  case 'p':  case 'g':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      if (! found_equal && ! found_plus)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      if (TREE_CODE (val) == INDIRECT_REF
	  || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || found_plus)
	{
	  if (! allows_reg)
	    mark_addressable (TREE_VALUE (tail));

	  output_rtx[i]
	    = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
			   EXPAND_MEMORY_USE_WO);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);
	}
      else
	{
	  output_rtx[i] = assign_temp (type, 0, 0, 0);
	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
	}

      if (found_plus)
	{
	  inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
	  inout_opnum[ninout++] = i;
	}
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */
  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)));
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
	switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
	  {
	  case '+':  case '=':
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;

	  case '?':  case '!':  case '*':  case '%':  case '&':
	  case 'V':  case 'm':  case 'o':  case '<':  case '>':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	    /* Whether or not a numeric constraint allows a register is
	       decided by the matching constraint, and so there is no need
	       to do anything special with them.  We must handle them in
	       the default case, so that we don't unnecessarily force
	       operands to memory.  */
	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]
		>= '0' + noutputs)
	      {
		error
		  ("matching constraint references invalid operand number");
		return;
	      }

	    /* ... fall through ... */

	  case 'p':  case 'g':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      if (! allows_reg)
	mark_addressable (TREE_VALUE (tail));

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      if (CONSTANT_P (XVECEXP (body, 3, i))
	  && ! general_operand (XVECEXP (body, 3, i),
				TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)))))
	{
	  if (allows_reg)
	    XVECEXP (body, 3, i)
	      = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			   XVECEXP (body, 3, i));
	  else
	    XVECEXP (body, 3, i)
	      = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
				 XVECEXP (body, 3, i));
	}

      if (! allows_reg
	  && (GET_CODE (XVECEXP (body, 3, i)) == REG
	      || GET_CODE (XVECEXP (body, 3, i)) == SUBREG
	      || GET_CODE (XVECEXP (body, 3, i)) == CONCAT))
	{
	  tree type = TREE_TYPE (TREE_VALUE (tail));
	  rtx memloc = assign_temp (type, 1, 1, 1);

	  emit_move_insn (memloc, XVECEXP (body, 3, i));
	  XVECEXP (body, 3, i) = memloc;
	}

      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }
  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs - ninout; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      static char match[9+1][2]
	= {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
      int j = inout_opnum[i];

      XVECEXP (body, 3, ninputs - ninout + i)      /* argvec */
	= output_rtx[j];
      XVECEXP (body, 4, ninputs - ninout + i)      /* constraints */
	= gen_rtx (ASM_INPUT, inout_mode[j], match[j]);
    }

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx (CLOBBER, VOIDmode,
			       gen_rtx (MEM, BLKmode,
					gen_rtx (SCRATCH, VOIDmode, 0)));
		  continue;
		}

	      /* Ignore unknown register, error already signaled.  */
	      continue;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
1773 /* Generate RTL to evaluate the expression EXP
1774 and remember it in case this is the VALUE in a ({... VALUE; }) construct. */
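/* Illustrative example: in the GNU C statement expression
       x = ({ int t = f (); t * 2; });
   the final expr-stmt `t * 2' supplies the value of the whole
   construct, so its value must be computed and remembered here.  */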
1776 void
1777 expand_expr_stmt (exp)
1778 tree exp;
1780 if (output_bytecode)
1782 int org_stack_depth = stack_depth;
1784 bc_expand_expr (exp);
1786 /* Restore stack depth */
1787 if (stack_depth < org_stack_depth)
1788 abort ();
1790 bc_emit_instruction (drop);
1792 last_expr_type = TREE_TYPE (exp);
1793 return;
1796 /* If -W, warn about statements with no side effects,
1797 except for an explicit cast to void (e.g. for assert()), and
1798 except inside a ({...}) where they may be useful. */
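/* Illustrative example: `x == y;' draws this warning, while
   `(void) (x == y);' does not, nor does the same statement written
   inside a ({...}) grouping.  */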
1799 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1801 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1802 && !(TREE_CODE (exp) == CONVERT_EXPR
1803 && TREE_TYPE (exp) == void_type_node))
1804 warning_with_file_and_line (emit_filename, emit_lineno,
1805 "statement with no effect");
1806 else if (warn_unused)
1807 warn_if_unused_value (exp);
1810 /* If EXP is of function type and we are expanding statements for
1811 value, convert it to pointer-to-function. */
1812 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1813 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1815 last_expr_type = TREE_TYPE (exp);
1816 if (! flag_syntax_only)
1817 last_expr_value = expand_expr (exp,
1818 (expr_stmts_for_value
1819 ? NULL_RTX : const0_rtx),
1820 VOIDmode, 0);
1822 /* If all we do is reference a volatile value in memory,
1823 copy it to a register to be sure it is actually touched. */
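/* Illustrative example: given `volatile int port;', the bare
   statement `port;' must still perform the load, so the MEM is
   copied into a register (or, for BLKmode values, referenced via
   the self-comparison emitted below).  */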
1824 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1825 && TREE_THIS_VOLATILE (exp))
1827 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode) ;
1829 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1830 copy_to_reg (last_expr_value);
1831 else
1833 rtx lab = gen_label_rtx ();
1835 /* Compare the value with itself to reference it. */
1836 emit_cmp_insn (last_expr_value, last_expr_value, EQ,
1837 expand_expr (TYPE_SIZE (last_expr_type),
1838 NULL_RTX, VOIDmode, 0),
1839 BLKmode, 0,
1840 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
1841 emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
1842 emit_label (lab);
1846 /* If this expression is part of a ({...}) and is in memory, we may have
1847 to preserve temporaries. */
1848 preserve_temp_slots (last_expr_value);
1850 /* Free any temporaries used to evaluate this expression. Any temporary
1851 used as a result of this expression will already have been preserved
1852 above. */
1853 free_temp_slots ();
1855 emit_queue ();
1858 /* Warn if EXP contains any computations whose results are not used.
1859 Return 1 if a warning is printed; 0 otherwise. */
1862 warn_if_unused_value (exp)
1863 tree exp;
1865 if (TREE_USED (exp))
1866 return 0;
1868 switch (TREE_CODE (exp))
1870 case PREINCREMENT_EXPR:
1871 case POSTINCREMENT_EXPR:
1872 case PREDECREMENT_EXPR:
1873 case POSTDECREMENT_EXPR:
1874 case MODIFY_EXPR:
1875 case INIT_EXPR:
1876 case TARGET_EXPR:
1877 case CALL_EXPR:
1878 case METHOD_CALL_EXPR:
1879 case RTL_EXPR:
1880 case WITH_CLEANUP_EXPR:
1881 case EXIT_EXPR:
1882 /* We don't warn about COND_EXPR because it may be a useful
1883 construct if either arm contains a side effect. */
1884 case COND_EXPR:
1885 return 0;
1887 case BIND_EXPR:
1888 /* For a binding, warn if no side effect within it. */
1889 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1891 case SAVE_EXPR:
1892 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1894 case TRUTH_ORIF_EXPR:
1895 case TRUTH_ANDIF_EXPR:
1896 /* In && or ||, warn if 2nd operand has no side effect. */
1897 return warn_if_unused_value (TREE_OPERAND (exp, 1));
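/* Illustrative example: `f () && x;' warns, since the second
   operand `x' computes a value that is never used.  */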
1899 case COMPOUND_EXPR:
1900 if (TREE_NO_UNUSED_WARNING (exp))
1901 return 0;
1902 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1903 return 1;
1904 /* Let people do `(foo (), 0)' without a warning. */
1905 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1906 return 0;
1907 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1909 case NOP_EXPR:
1910 case CONVERT_EXPR:
1911 case NON_LVALUE_EXPR:
1912 /* Don't warn about values cast to void. */
1913 if (TREE_TYPE (exp) == void_type_node)
1914 return 0;
1915 /* Don't warn about conversions not explicit in the user's program. */
1916 if (TREE_NO_UNUSED_WARNING (exp))
1917 return 0;
1918 /* Assignment to a cast usually results in a cast of a modify.
1919 Don't complain about that. There can be an arbitrary number of
1920 casts before the modify, so we must loop until we find the first
1921 non-cast expression and then test to see if that is a modify. */
1923 tree tem = TREE_OPERAND (exp, 0);
1925 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1926 tem = TREE_OPERAND (tem, 0);
1928 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1929 || TREE_CODE (tem) == CALL_EXPR)
1930 return 0;
1932 goto warn;
1934 case INDIRECT_REF:
1935 /* Don't warn about automatic dereferencing of references, since
1936 the user cannot control it. */
1937 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1938 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1939 /* ... fall through ... */
1941 default:
1942 /* Referencing a volatile value is a side effect, so don't warn. */
1943 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1944 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1945 && TREE_THIS_VOLATILE (exp))
1946 return 0;
1947 warn:
1948 warning_with_file_and_line (emit_filename, emit_lineno,
1949 "value computed is not used");
1950 return 1;
1954 /* Clear out the memory of the last expression evaluated. */
1956 void
1957 clear_last_expr ()
1959 last_expr_type = 0;
1962 /* Begin a statement which will return a value.
1963 Return the RTL_EXPR for this statement expr.
1964 The caller must save that value and pass it to expand_end_stmt_expr. */
1966 tree
1967 expand_start_stmt_expr ()
1969 int momentary;
1970 tree t;
1972 /* When generating bytecode just note down the stack depth */
1973 if (output_bytecode)
1974 return (build_int_2 (stack_depth, 0));
1976 /* Make the RTL_EXPR node temporary, not momentary,
1977 so that rtl_expr_chain doesn't become garbage. */
1978 momentary = suspend_momentary ();
1979 t = make_node (RTL_EXPR);
1980 resume_momentary (momentary);
1981 do_pending_stack_adjust ();
1982 start_sequence_for_rtl_expr (t);
1983 NO_DEFER_POP;
1984 expr_stmts_for_value++;
1985 return t;
1988 /* Restore the previous state at the end of a statement that returns a value.
1989 Returns a tree node representing the statement's value and the
1990 insns to compute the value.
1992 The nodes of that expression have been freed by now, so we cannot use them.
1993 But we don't want to do that anyway; the expression has already been
1994 evaluated and now we just want to use the value. So generate an RTL_EXPR
1995 with the proper type and RTL value.
1997 If the last substatement was not an expression,
1998 return something with type `void'. */
2000 tree
2001 expand_end_stmt_expr (t)
2002 tree t;
2004 if (output_bytecode)
2006 int i;
2007 tree t;
2010 /* At this point, all expressions have been evaluated in order.
2011 However, all expression values have been popped when evaluated,
2012 which means we have to recover the last expression value. This is
2013 the last value removed by means of a `drop' instruction. Instead
2014 of adding code to inhibit dropping the last expression value, it
2015 is here recovered by undoing the `drop'. Since `drop' is
2016 equivalent to `adjstackSI [1]', it can be undone with `adjstackSI
2017 [-1]'. */
2019 bc_adjust_stack (-1);
2021 if (!last_expr_type)
2022 last_expr_type = void_type_node;
2024 t = make_node (RTL_EXPR);
2025 TREE_TYPE (t) = last_expr_type;
2026 RTL_EXPR_RTL (t) = NULL;
2027 RTL_EXPR_SEQUENCE (t) = NULL;
2029 /* Don't consider deleting this expr or containing exprs at tree level. */
2030 TREE_THIS_VOLATILE (t) = 1;
2032 last_expr_type = 0;
2033 return t;
2036 OK_DEFER_POP;
2038 if (last_expr_type == 0)
2040 last_expr_type = void_type_node;
2041 last_expr_value = const0_rtx;
2043 else if (last_expr_value == 0)
2044 /* There are some cases where this can happen, such as when the
2045 statement is void type. */
2046 last_expr_value = const0_rtx;
2047 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2048 /* Remove any possible QUEUED. */
2049 last_expr_value = protect_from_queue (last_expr_value, 0);
2051 emit_queue ();
2053 TREE_TYPE (t) = last_expr_type;
2054 RTL_EXPR_RTL (t) = last_expr_value;
2055 RTL_EXPR_SEQUENCE (t) = get_insns ();
2057 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2059 end_sequence ();
2061 /* Don't consider deleting this expr or containing exprs at tree level. */
2062 TREE_SIDE_EFFECTS (t) = 1;
2063 /* Propagate volatility of the actual RTL expr. */
2064 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2066 last_expr_type = 0;
2067 expr_stmts_for_value--;
2069 return t;
2072 /* Generate RTL for the start of an if-then. COND is the expression
2073 whose truth should be tested.
2075 If EXITFLAG is nonzero, this conditional is visible to
2076 `exit_something'. */
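/* Sketch of the emitted control flow (illustrative, not literal RTL):
       if (!COND) goto next_label;    -- the do_jump below
       ...then-clause...
     next_label:                      -- emitted by expand_end_cond  */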
2078 void
2079 expand_start_cond (cond, exitflag)
2080 tree cond;
2081 int exitflag;
2083 struct nesting *thiscond = ALLOC_NESTING ();
2085 /* Make an entry on cond_stack for the cond we are entering. */
2087 thiscond->next = cond_stack;
2088 thiscond->all = nesting_stack;
2089 thiscond->depth = ++nesting_depth;
2090 thiscond->data.cond.next_label = gen_label_rtx ();
2091 /* Before we encounter an `else', we don't need a separate exit label
2092 unless there are supposed to be exit statements
2093 to exit this conditional. */
2094 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2095 thiscond->data.cond.endif_label = thiscond->exit_label;
2096 cond_stack = thiscond;
2097 nesting_stack = thiscond;
2099 if (output_bytecode)
2100 bc_expand_start_cond (cond, exitflag);
2101 else
2102 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2105 /* Generate RTL between the then-clause and the elseif-clause
2106 of an if-then-elseif-.... */
2108 void
2109 expand_start_elseif (cond)
2110 tree cond;
2112 if (cond_stack->data.cond.endif_label == 0)
2113 cond_stack->data.cond.endif_label = gen_label_rtx ();
2114 emit_jump (cond_stack->data.cond.endif_label);
2115 emit_label (cond_stack->data.cond.next_label);
2116 cond_stack->data.cond.next_label = gen_label_rtx ();
2117 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2120 /* Generate RTL between the then-clause and the else-clause
2121 of an if-then-else. */
2123 void
2124 expand_start_else ()
2126 if (cond_stack->data.cond.endif_label == 0)
2127 cond_stack->data.cond.endif_label = gen_label_rtx ();
2129 if (output_bytecode)
2131 bc_expand_start_else ();
2132 return;
2135 emit_jump (cond_stack->data.cond.endif_label);
2136 emit_label (cond_stack->data.cond.next_label);
2137 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2140 /* After calling expand_start_else, turn this "else" into an "else if"
2141 by providing another condition. */
2143 void
2144 expand_elseif (cond)
2145 tree cond;
2147 cond_stack->data.cond.next_label = gen_label_rtx ();
2148 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2151 /* Generate RTL for the end of an if-then.
2152 Pop the record for it off of cond_stack. */
2154 void
2155 expand_end_cond ()
2157 struct nesting *thiscond = cond_stack;
2159 if (output_bytecode)
2160 bc_expand_end_cond ();
2161 else
2163 do_pending_stack_adjust ();
2164 if (thiscond->data.cond.next_label)
2165 emit_label (thiscond->data.cond.next_label);
2166 if (thiscond->data.cond.endif_label)
2167 emit_label (thiscond->data.cond.endif_label);
2170 POPSTACK (cond_stack);
2171 last_expr_type = 0;
2175 /* Generate code for the start of an if-then. COND is the expression
2176 whose truth is to be tested; if EXITFLAG is nonzero this conditional
2177 is to be visible to exit_something. It is assumed that the caller
2178 has pushed the previous context on the cond stack. */
2180 static void
2181 bc_expand_start_cond (cond, exitflag)
2182 tree cond;
2183 int exitflag;
2185 struct nesting *thiscond = cond_stack;
2187 thiscond->data.case_stmt.nominal_type = cond;
2188 if (! exitflag)
2189 thiscond->exit_label = gen_label_rtx ();
2190 bc_expand_expr (cond);
2191 bc_emit_bytecode (xjumpifnot);
2192 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2194 #ifdef DEBUG_PRINT_CODE
2195 fputc ('\n', stderr);
2196 #endif
2199 /* Generate the label for the end of an if with
2200 no else clause. */
2202 static void
2203 bc_expand_end_cond ()
2205 struct nesting *thiscond = cond_stack;
2207 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
2210 /* Generate code for the start of the else clause of
2211 an if-then-else. */
2213 static void
2214 bc_expand_start_else ()
2216 struct nesting *thiscond = cond_stack;
2218 thiscond->data.cond.endif_label = thiscond->exit_label;
2219 thiscond->exit_label = gen_label_rtx ();
2220 bc_emit_bytecode (jump);
2221 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2223 #ifdef DEBUG_PRINT_CODE
2224 fputc ('\n', stderr);
2225 #endif
2227 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
2230 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2231 loop should be exited by `exit_something'. This is a loop for which
2232 `expand_continue' will jump to the top of the loop.
2234 Make an entry on loop_stack to record the labels associated with
2235 this loop. */
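/* Sketch of the emitted layout (illustrative):
       NOTE_INSN_LOOP_BEG
     start_label:            -- also the continue point by default
       ...loop body...
   expand_end_loop later adds the jump back to start_label, the
   NOTE_INSN_LOOP_END note and end_label.  */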
2237 struct nesting *
2238 expand_start_loop (exit_flag)
2239 int exit_flag;
2241 register struct nesting *thisloop = ALLOC_NESTING ();
2243 /* Make an entry on loop_stack for the loop we are entering. */
2245 thisloop->next = loop_stack;
2246 thisloop->all = nesting_stack;
2247 thisloop->depth = ++nesting_depth;
2248 thisloop->data.loop.start_label = gen_label_rtx ();
2249 thisloop->data.loop.end_label = gen_label_rtx ();
2250 thisloop->data.loop.alt_end_label = 0;
2251 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2252 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2253 loop_stack = thisloop;
2254 nesting_stack = thisloop;
2256 if (output_bytecode)
2258 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2259 return thisloop;
2262 do_pending_stack_adjust ();
2263 emit_queue ();
2264 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2265 emit_label (thisloop->data.loop.start_label);
2267 return thisloop;
2270 /* Like expand_start_loop but for a loop where the continuation point
2271 (for expand_continue_loop) will be specified explicitly. */
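/* Illustrative example: for a C `for (init; cond; incr) body',
   `continue' must reach `incr' rather than the top of the loop, so
   the front end calls expand_loop_continue_here just before
   expanding `incr'.  */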
2273 struct nesting *
2274 expand_start_loop_continue_elsewhere (exit_flag)
2275 int exit_flag;
2277 struct nesting *thisloop = expand_start_loop (exit_flag);
2278 loop_stack->data.loop.continue_label = gen_label_rtx ();
2279 return thisloop;
2282 /* Specify the continuation point for a loop started with
2283 expand_start_loop_continue_elsewhere.
2284 Use this at the point in the code to which a continue statement
2285 should jump. */
2287 void
2288 expand_loop_continue_here ()
2290 if (output_bytecode)
2292 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
2293 return;
2295 do_pending_stack_adjust ();
2296 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2297 emit_label (loop_stack->data.loop.continue_label);
2300 /* End a loop. */
2302 static void
2303 bc_expand_end_loop ()
2305 struct nesting *thisloop = loop_stack;
2307 bc_emit_bytecode (jump);
2308 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2310 #ifdef DEBUG_PRINT_CODE
2311 fputc ('\n', stderr);
2312 #endif
2314 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
2315 POPSTACK (loop_stack);
2316 last_expr_type = 0;
2320 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2321 Pop the block off of loop_stack. */
2323 void
2324 expand_end_loop ()
2326 register rtx insn;
2327 register rtx start_label;
2328 rtx last_test_insn = 0;
2329 int num_insns = 0;
2331 if (output_bytecode)
2333 bc_expand_end_loop ();
2334 return;
2337 insn = get_last_insn ();
2338 start_label = loop_stack->data.loop.start_label;
2340 /* Mark the continue-point at the top of the loop if none elsewhere. */
2341 if (start_label == loop_stack->data.loop.continue_label)
2342 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2344 do_pending_stack_adjust ();
2346 /* If optimizing, perhaps reorder the loop. If the loop
2347 starts with a conditional exit, roll that to the end
2348 where it will optimize together with the jump back.
2350 We look for the last conditional branch to the exit that we encounter
2351 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2352 branch to the exit first, use it.
2354 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2355 because moving them is not valid. */
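/* Sketch of the transformation (illustrative), for `while (c) body':
     before:  start: if (!c) goto end;  body;  goto start;  end:
     after:   goto start;  newstart: body;
              start: if (!c) goto end;  goto newstart;  end:
   Jump optimization can then fuse the exit test with the jump back
   into a single `if (c) goto newstart'.  */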
2357 if (optimize &&
2359 ! (GET_CODE (insn) == JUMP_INSN
2360 && GET_CODE (PATTERN (insn)) == SET
2361 && SET_DEST (PATTERN (insn)) == pc_rtx
2362 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2364 /* Scan insns from the top of the loop looking for a qualified
2365 conditional exit. */
2366 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2367 insn = NEXT_INSN (insn))
2369 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2370 break;
2372 if (GET_CODE (insn) == NOTE
2373 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2374 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2375 break;
2377 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2378 num_insns++;
2380 if (last_test_insn && num_insns > 30)
2381 break;
2383 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2384 && SET_DEST (PATTERN (insn)) == pc_rtx
2385 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2386 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2387 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2388 == loop_stack->data.loop.end_label)
2389 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2390 == loop_stack->data.loop.alt_end_label)))
2391 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2392 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2393 == loop_stack->data.loop.end_label)
2394 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2395 == loop_stack->data.loop.alt_end_label)))))
2396 last_test_insn = insn;
2398 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2399 && GET_CODE (PATTERN (insn)) == SET
2400 && SET_DEST (PATTERN (insn)) == pc_rtx
2401 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2402 && ((XEXP (SET_SRC (PATTERN (insn)), 0)
2403 == loop_stack->data.loop.end_label)
2404 || (XEXP (SET_SRC (PATTERN (insn)), 0)
2405 == loop_stack->data.loop.alt_end_label)))
2406 /* Include BARRIER. */
2407 last_test_insn = NEXT_INSN (insn);
2410 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2412 /* We found one. Move everything from there up
2413 to the end of the loop, and add a jump into the loop
2414 to jump to there. */
2415 register rtx newstart_label = gen_label_rtx ();
2416 register rtx start_move = start_label;
2418 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2419 then we want to move this note also. */
2420 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2421 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2422 == NOTE_INSN_LOOP_CONT))
2423 start_move = PREV_INSN (start_move);
2425 emit_label_after (newstart_label, PREV_INSN (start_move));
2426 reorder_insns (start_move, last_test_insn, get_last_insn ());
2427 emit_jump_insn_after (gen_jump (start_label),
2428 PREV_INSN (newstart_label));
2429 emit_barrier_after (PREV_INSN (newstart_label));
2430 start_label = newstart_label;
2434 emit_jump (start_label);
2435 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2436 emit_label (loop_stack->data.loop.end_label);
2438 POPSTACK (loop_stack);
2440 last_expr_type = 0;
2443 /* Generate a jump to the current loop's continue-point.
2444 This is usually the top of the loop, but may be specified
2445 explicitly elsewhere. If not currently inside a loop,
2446 return 0 and do nothing; caller will print an error message. */
2449 expand_continue_loop (whichloop)
2450 struct nesting *whichloop;
2452 last_expr_type = 0;
2453 if (whichloop == 0)
2454 whichloop = loop_stack;
2455 if (whichloop == 0)
2456 return 0;
2457 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2458 NULL_RTX);
2459 return 1;
2462 /* Generate a jump to exit the current loop. If not currently inside a loop,
2463 return 0 and do nothing; caller will print an error message. */
2466 expand_exit_loop (whichloop)
2467 struct nesting *whichloop;
2469 last_expr_type = 0;
2470 if (whichloop == 0)
2471 whichloop = loop_stack;
2472 if (whichloop == 0)
2473 return 0;
2474 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2475 return 1;
2478 /* Generate a conditional jump to exit the current loop if COND
2479 evaluates to zero. If not currently inside a loop,
2480 return 0 and do nothing; caller will print an error message. */
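/* Illustrative usage: a front end can expand `while (c) body'
   roughly as
       expand_start_loop (1);
       expand_exit_loop_if_false (0, c);
       ...body...
       expand_end_loop ();  */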
2483 expand_exit_loop_if_false (whichloop, cond)
2484 struct nesting *whichloop;
2485 tree cond;
2487 last_expr_type = 0;
2488 if (whichloop == 0)
2489 whichloop = loop_stack;
2490 if (whichloop == 0)
2491 return 0;
2492 if (output_bytecode)
2494 bc_expand_expr (cond);
2495 bc_expand_goto_internal (xjumpifnot,
2496 BYTECODE_BC_LABEL (whichloop->exit_label),
2497 NULL_TREE);
2499 else
2501 /* In order to handle fixups, we actually create a conditional jump
2502 around an unconditional branch to exit the loop. If fixups are
2503 necessary, they go before the unconditional branch. */
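/* Sketch (illustrative): instead of `if (!cond) goto end_label'
   this emits
       if (cond) goto label;
       ...fixup insns, if any...
       goto end_label;
     label:
   so that fixups can be placed before the exit branch.  */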
2505 rtx label = gen_label_rtx ();
2506 rtx last_insn;
2508 do_jump (cond, NULL_RTX, label);
2509 last_insn = get_last_insn ();
2510 if (GET_CODE (last_insn) == CODE_LABEL)
2511 whichloop->data.loop.alt_end_label = last_insn;
2512 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2513 NULL_RTX);
2514 emit_label (label);
2517 return 1;
2520 /* Return non-zero if we should preserve sub-expressions as separate
2521 pseudos. We never do so if we aren't optimizing. We always do so
2522 if -fexpensive-optimizations.
2524 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2525 the loop may still be a small one. */
2528 preserve_subexpressions_p ()
2530 rtx insn;
2532 if (flag_expensive_optimizations)
2533 return 1;
2535 if (optimize == 0 || loop_stack == 0)
2536 return 0;
2538 insn = get_last_insn_anywhere ();
2540 return (insn
2541 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2542 < n_non_fixed_regs * 3));
2546 /* Generate a jump to exit the current loop, conditional, binding contour
2547 or case statement. Not all such constructs are visible to this function,
2548 only those started with EXIT_FLAG nonzero. Individual languages use
2549 the EXIT_FLAG parameter to control which kinds of constructs you can
2550 exit this way.
2552 If not currently inside anything that can be exited,
2553 return 0 and do nothing; caller will print an error message. */
2556 expand_exit_something ()
2558 struct nesting *n;
2559 last_expr_type = 0;
2560 for (n = nesting_stack; n; n = n->all)
2561 if (n->exit_label != 0)
2563 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2564 return 1;
2567 return 0;
2570 /* Generate RTL to return from the current function, with no value.
2571 (That is, we do not do anything about returning any value.) */
2573 void
2574 expand_null_return ()
2576 struct nesting *block = block_stack;
2577 rtx last_insn = 0;
2579 if (output_bytecode)
2581 bc_emit_instruction (ret);
2582 return;
2585 /* Does any pending block have cleanups? */
2587 while (block && block->data.block.cleanups == 0)
2588 block = block->next;
2590 /* If yes, use a goto to return, since that runs cleanups. */
2592 expand_null_return_1 (last_insn, block != 0);
2595 /* Generate RTL to return from the current function, with value VAL. */
2597 static void
2598 expand_value_return (val)
2599 rtx val;
2601 struct nesting *block = block_stack;
2602 rtx last_insn = get_last_insn ();
2603 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2605 /* Copy the value to the return location
2606 unless it's already there. */
2608 if (return_reg != val)
2610 #ifdef PROMOTE_FUNCTION_RETURN
2611 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2612 int unsignedp = TREE_UNSIGNED (type);
2613 enum machine_mode mode
2614 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2615 &unsignedp, 1);
2617 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2618 convert_move (return_reg, val, unsignedp);
2619 else
2620 #endif
2621 emit_move_insn (return_reg, val);
2623 if (GET_CODE (return_reg) == REG
2624 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2625 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2626 /* Handle calls that return values in multiple non-contiguous locations.
2627 The Irix 6 ABI has examples of this. */
2628 else if (GET_CODE (return_reg) == PARALLEL)
2630 int i;
2632 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2634 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2636 if (GET_CODE (x) == REG
2637 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2638 emit_insn (gen_rtx (USE, VOIDmode, x));
2642 /* Does any pending block have cleanups? */
2644 while (block && block->data.block.cleanups == 0)
2645 block = block->next;
2647 /* If yes, use a goto to return, since that runs cleanups.
2648 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2650 expand_null_return_1 (last_insn, block != 0);
2653 /* Output a return with no value. If LAST_INSN is nonzero,
2654 pretend that the return takes place after LAST_INSN.
2655 If USE_GOTO is nonzero then don't use a return instruction;
2656 go to the return label instead. This causes any cleanups
2657 of pending blocks to be executed normally. */
2659 static void
2660 expand_null_return_1 (last_insn, use_goto)
2661 rtx last_insn;
2662 int use_goto;
2664 rtx end_label = cleanup_label ? cleanup_label : return_label;
2666 clear_pending_stack_adjust ();
2667 do_pending_stack_adjust ();
2668 last_expr_type = 0;
2670 /* PCC-struct return always uses an epilogue. */
2671 if (current_function_returns_pcc_struct || use_goto)
2673 if (end_label == 0)
2674 end_label = return_label = gen_label_rtx ();
2675 expand_goto_internal (NULL_TREE, end_label, last_insn);
2676 return;
2679 /* Otherwise output a simple return-insn if one is available,
2680 unless it won't do the job. */
2681 #ifdef HAVE_return
2682 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2684 emit_jump_insn (gen_return ());
2685 emit_barrier ();
2686 return;
2688 #endif
2690 /* Otherwise jump to the epilogue. */
2691 expand_goto_internal (NULL_TREE, end_label, last_insn);
2694 /* Generate RTL to evaluate the expression RETVAL and return it
2695 from the current function. */
2697 void
2698 expand_return (retval)
2699 tree retval;
2701 /* If there are any cleanups to be performed, then they will
2702 be inserted following LAST_INSN. It is desirable
2703 that the last_insn, for such purposes, should be the
2704 last insn before computing the return value. Otherwise, cleanups
2705 which call functions can clobber the return value. */
2706 /* ??? rms: I think that is erroneous, because in C++ it would
2707 run destructors on variables that might be used in the subsequent
2708 computation of the return value. */
2709 rtx last_insn = 0;
2710 register rtx val = 0;
2711 register rtx op0;
2712 tree retval_rhs;
2713 int cleanups;
2714 struct nesting *block;
2716 /* Bytecode returns are quite simple, just leave the result on the
2717 arithmetic stack. */
2718 if (output_bytecode)
2720 bc_expand_expr (retval);
2721 bc_emit_instruction (ret);
2722 return;
2725 /* If function wants no value, give it none. */
2726 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2728 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2729 emit_queue ();
2730 expand_null_return ();
2731 return;
2734 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2735 /* This is not sufficient. We also need to watch for cleanups of the
2736 expression we are about to expand. Unfortunately, we cannot know
2737 if it has cleanups until we expand it, and we want to change how we
2738 expand it depending upon if we need cleanups. We can't win. */
2739 #if 0
2740 cleanups = any_pending_cleanups (1);
2741 #else
2742 cleanups = 1;
2743 #endif
2745 if (TREE_CODE (retval) == RESULT_DECL)
2746 retval_rhs = retval;
2747 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2748 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2749 retval_rhs = TREE_OPERAND (retval, 1);
2750 else if (TREE_TYPE (retval) == void_type_node)
2751 /* Recognize tail-recursive call to void function. */
2752 retval_rhs = retval;
2753 else
2754 retval_rhs = NULL_TREE;
2756 /* Only use `last_insn' if there are cleanups which must be run. */
2757 if (cleanups || cleanup_label != 0)
2758 last_insn = get_last_insn ();
2760 /* Distribute return down conditional expr if either of the sides
2761 may involve tail recursion (see test below). This enhances the number
2762 of tail recursions we see. Don't do this always since it can produce
2763 sub-optimal code in some cases and we distribute assignments into
2764 conditional expressions when it would help. */
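/* Illustrative example: `return c ? f (x) : g (y);' is split into
       if (!c) goto L;  return f (x);  L: return g (y);
   so that each arm can be examined by the tail-recursion test
   below.  */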
2766 if (optimize && retval_rhs != 0
2767 && frame_offset == 0
2768 && TREE_CODE (retval_rhs) == COND_EXPR
2769 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2770 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2772 rtx label = gen_label_rtx ();
2773 tree expr;
2775 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2776 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2777 DECL_RESULT (current_function_decl),
2778 TREE_OPERAND (retval_rhs, 1));
2779 TREE_SIDE_EFFECTS (expr) = 1;
2780 expand_return (expr);
2781 emit_label (label);
2783 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2784 DECL_RESULT (current_function_decl),
2785 TREE_OPERAND (retval_rhs, 2));
2786 TREE_SIDE_EFFECTS (expr) = 1;
2787 expand_return (expr);
2788 return;
2791 /* For tail-recursive call to current function,
2792 just jump back to the beginning.
2793 It's unsafe if any auto variable in this function
2794 has its address taken; for simplicity,
2795 require stack frame to be empty. */
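/* Illustrative example: in
       int f (int n, int acc) { ... return f (n - 1, n * acc); }
   the recursive call is replaced by storing the new argument values
   into the formals and jumping back to tail_recursion_label.  */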
2796 if (optimize && retval_rhs != 0
2797 && frame_offset == 0
2798 && TREE_CODE (retval_rhs) == CALL_EXPR
2799 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2800 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2801 /* Finish checking validity, and if valid emit code
2802 to set the argument variables for the new call. */
2803 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2804 DECL_ARGUMENTS (current_function_decl)))
2806 if (tail_recursion_label == 0)
2808 tail_recursion_label = gen_label_rtx ();
2809 emit_label_after (tail_recursion_label,
2810 tail_recursion_reentry);
2812 emit_queue ();
2813 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2814 emit_barrier ();
2815 return;
2817 #ifdef HAVE_return
2818 /* This optimization is safe if there are local cleanups
2819 because expand_null_return takes care of them.
2820 ??? I think it should also be safe when there is a cleanup label,
2821 because expand_null_return takes care of them, too.
2822 Any reason why not? */
2823 if (HAVE_return && cleanup_label == 0
2824 && ! current_function_returns_pcc_struct
2825 && BRANCH_COST <= 1)
2827 /* If this is return x == y; then generate
2828 if (x == y) return 1; else return 0;
2829 if we can do it with explicit return insns and branches are cheap,
2830 but not if we have the corresponding scc insn. */
2831 int has_scc = 0;
2832 if (retval_rhs)
2833 switch (TREE_CODE (retval_rhs))
2835 case EQ_EXPR:
2836 #ifdef HAVE_seq
2837 has_scc = HAVE_seq;
2838 #endif
2839 case NE_EXPR:
2840 #ifdef HAVE_sne
2841 has_scc = HAVE_sne;
2842 #endif
2843 case GT_EXPR:
2844 #ifdef HAVE_sgt
2845 has_scc = HAVE_sgt;
2846 #endif
2847 case GE_EXPR:
2848 #ifdef HAVE_sge
2849 has_scc = HAVE_sge;
2850 #endif
2851 case LT_EXPR:
2852 #ifdef HAVE_slt
2853 has_scc = HAVE_slt;
2854 #endif
2855 case LE_EXPR:
2856 #ifdef HAVE_sle
2857 has_scc = HAVE_sle;
2858 #endif
2859 case TRUTH_ANDIF_EXPR:
2860 case TRUTH_ORIF_EXPR:
2861 case TRUTH_AND_EXPR:
2862 case TRUTH_OR_EXPR:
2863 case TRUTH_NOT_EXPR:
2864 case TRUTH_XOR_EXPR:
2865 if (! has_scc)
2867 op0 = gen_label_rtx ();
2868 jumpifnot (retval_rhs, op0);
2869 expand_value_return (const1_rtx);
2870 emit_label (op0);
2871 expand_value_return (const0_rtx);
2872 return;
2874 break;
2876 default:
2877 break;
2880 #endif /* HAVE_return */
2882 /* If the result is an aggregate that is being returned in one (or more)
2883 registers, load the registers here. The compiler currently can't handle
2884 copying a BLKmode value into registers. We could put this code in a
2885 more general area (for use by everyone instead of just function
2886 call/return), but until this feature is generally usable it is kept here
2887 (and in expand_call). The value must go into a pseudo in case there
2888 are cleanups that will clobber the real return register. */
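/* Illustrative example: on a target that returns small aggregates
   in registers, `struct s { char c[6]; }' has BLKmode, so its bytes
   are moved bit-field by bit-field into word_mode pseudos below and
   only then into the return register.  */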
2890 if (retval_rhs != 0
2891 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2892 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2894 int i, bitpos, xbitpos;
2895 int big_endian_correction = 0;
2896 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2897 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2898 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
2899 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2900 rtx result_reg, src, dst;
2901 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2902 enum machine_mode tmpmode, result_reg_mode;
2904 /* Structures whose size is not a multiple of a word are aligned
2905 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2906 machine, this means we must skip the empty high order bytes when
2907 calculating the bit offset. */
2908 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2909 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2910 * BITS_PER_UNIT));
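/* Worked example (illustrative): with 32-bit words, a 6-byte
   structure leaves bytes % UNITS_PER_WORD == 2, so
   big_endian_correction = 32 - 2 * 8 = 16, skipping the two empty
   high-order bytes of the first destination word.  */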
2912 /* Copy the structure BITSIZE bits at a time. */
2913 for (bitpos = 0, xbitpos = big_endian_correction;
2914 bitpos < bytes * BITS_PER_UNIT;
2915 bitpos += bitsize, xbitpos += bitsize)
2917 /* We need a new destination pseudo each time xbitpos is
2918 on a word boundary and when xbitpos == big_endian_correction
2919 (the first time through). */
2920 if (xbitpos % BITS_PER_WORD == 0
2921 || xbitpos == big_endian_correction)
2923 /* Generate an appropriate register. */
2924 dst = gen_reg_rtx (word_mode);
2925 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2927 /* Clobber the destination before we move anything into it. */
2928 emit_insn (gen_rtx (CLOBBER, VOIDmode, dst));
2931 /* We need a new source operand each time bitpos is on a word
2932 boundary. */
2933 if (bitpos % BITS_PER_WORD == 0)
2934 src = operand_subword_force (result_val,
2935 bitpos / BITS_PER_WORD,
2936 BLKmode);
2938 /* Use bitpos for the source extraction (left justified) and
2939 xbitpos for the destination store (right justified). */
2940 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2941 extract_bit_field (src, bitsize,
2942 bitpos % BITS_PER_WORD, 1,
2943 NULL_RTX, word_mode,
2944 word_mode,
2945 bitsize / BITS_PER_UNIT,
2946 BITS_PER_WORD),
2947 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2950 /* Find the smallest integer mode large enough to hold the
2951 entire structure and use that mode instead of BLKmode
2952 on the USE insn for the return register. */
2953 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2954 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2955 tmpmode != MAX_MACHINE_MODE;
2956 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2958 /* Have we found a large enough mode? */
2959 if (GET_MODE_SIZE (tmpmode) >= bytes)
2960 break;
2963 /* No suitable mode found. */
2964 if (tmpmode == MAX_MACHINE_MODE)
2965 abort ();
2967 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2969 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2970 result_reg_mode = word_mode;
2971 else
2972 result_reg_mode = tmpmode;
2973 result_reg = gen_reg_rtx (result_reg_mode);
2975 emit_queue ();
2976 for (i = 0; i < n_regs; i++)
2977 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2978 result_pseudos[i]);
2980 if (tmpmode != result_reg_mode)
2981 result_reg = gen_lowpart (tmpmode, result_reg);
2983 expand_value_return (result_reg);
2985 else if (cleanups
2986 && retval_rhs != 0
2987 && TREE_TYPE (retval_rhs) != void_type_node
2988 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2990 /* Calculate the return value into a pseudo reg. */
2991 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
2992 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2993 val = force_not_mem (val);
2994 emit_queue ();
2995 /* Return the calculated value, doing cleanups first. */
2996 expand_value_return (val);
2998 else
3000 /* No cleanups or no hard reg used;
3001 calculate value into hard return reg. */
3002 expand_expr (retval, const0_rtx, VOIDmode, 0);
3003 emit_queue ();
3004 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
3008 /* Return 1 if the end of the generated RTX is not a barrier.
3009 This means code already compiled can drop through. */
3012 drop_through_at_end_p ()
3014 rtx insn = get_last_insn ();
3015 while (insn && GET_CODE (insn) == NOTE)
3016 insn = PREV_INSN (insn);
3017 return insn && GET_CODE (insn) != BARRIER;
3020 /* Emit code to alter this function's formal parms for a tail-recursive call.
3021 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3022 FORMALS is the chain of decls of formals.
3023 Return 1 if this can be done;
3024 otherwise return 0 and do not emit any code. */
3026 static int
3027 tail_recursion_args (actuals, formals)
3028 tree actuals, formals;
3030 register tree a = actuals, f = formals;
3031 register int i;
3032 register rtx *argvec;
3034 /* Check that number and types of actuals are compatible
3035 with the formals. This is not always true in valid C code.
3036 Also check that no formal needs to be addressable
3037 and that all formals are scalars. */
3039 /* Also count the args. */
3041 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3043 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3044 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3045 return 0;
3046 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3047 return 0;
3049 if (a != 0 || f != 0)
3050 return 0;
3052 /* Compute all the actuals. */
3054 argvec = (rtx *) alloca (i * sizeof (rtx));
3056 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3057 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3059 /* Find which actual values refer to current values of previous formals.
3060 Copy each of them now, before any formal is changed. */
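/* Illustrative example: for a tail call `return f (b, a)' inside
   f (a, b), storing into formal `a' first would clobber the value
   still needed for `b', so such actuals are copied to fresh
   registers before any formal is overwritten.  */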
3062 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3064 int copy = 0;
3065 register int j;
3066 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3067 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3068 { copy = 1; break; }
3069 if (copy)
3070 argvec[i] = copy_to_reg (argvec[i]);
3073 /* Store the values of the actuals into the formals. */
3075 for (f = formals, a = actuals, i = 0; f;
3076 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3078 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3079 emit_move_insn (DECL_RTL (f), argvec[i]);
3080 else
3081 convert_move (DECL_RTL (f), argvec[i],
3082 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3085 free_temp_slots ();
3086 return 1;
3089 /* Generate the RTL code for entering a binding contour.
3090 The variables are declared one by one, by calls to `expand_decl'.
3092 EXIT_FLAG is nonzero if this construct should be visible to
3093 `exit_something'. */
3095 void
3096 expand_start_bindings (exit_flag)
3097 int exit_flag;
3099 struct nesting *thisblock = ALLOC_NESTING ();
3100 rtx note = output_bytecode ? 0 : emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3102 /* Make an entry on block_stack for the block we are entering. */
3104 thisblock->next = block_stack;
3105 thisblock->all = nesting_stack;
3106 thisblock->depth = ++nesting_depth;
3107 thisblock->data.block.stack_level = 0;
3108 thisblock->data.block.cleanups = 0;
3109 thisblock->data.block.function_call_count = 0;
3110 thisblock->data.block.exception_region = 0;
3111 thisblock->data.block.target_temp_slot_level = target_temp_slot_level;
3113 thisblock->data.block.conditional_code = 0;
3114 thisblock->data.block.last_unconditional_cleanup = note;
3115 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3117 if (block_stack
3118 && !(block_stack->data.block.cleanups == NULL_TREE
3119 && block_stack->data.block.outer_cleanups == NULL_TREE))
3120 thisblock->data.block.outer_cleanups
3121 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3122 block_stack->data.block.outer_cleanups);
3123 else
3124 thisblock->data.block.outer_cleanups = 0;
3125 thisblock->data.block.label_chain = 0;
3126 thisblock->data.block.innermost_stack_block = stack_block_stack;
3127 thisblock->data.block.first_insn = note;
3128 thisblock->data.block.block_start_count = ++block_start_count;
3129 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3130 block_stack = thisblock;
3131 nesting_stack = thisblock;
3133 if (!output_bytecode)
3135 /* Make a new level for allocating stack slots. */
3136 push_temp_slots ();
3140 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3141 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3142 expand_expr are made. After we end the region, we know that all
3143 space for all temporaries that were created by TARGET_EXPRs will be
3144 destroyed and their space freed for reuse. */
3146 void
3147 expand_start_target_temps ()
3149 /* This is so that even if the result is preserved, the space
3150 allocated will be freed, as we know that it is no longer in use. */
3151 push_temp_slots ();
3153 /* Start a new binding layer that will keep track of all cleanup
3154 actions to be performed. */
3155 expand_start_bindings (0);
3157 target_temp_slot_level = temp_slot_level;
3160 void
3161 expand_end_target_temps ()
3163 expand_end_bindings (NULL_TREE, 0, 0);
3165 /* This is so that even if the result is preserved, the space
3166 allocated will be freed, as we know that it is no longer in use. */
3167 pop_temp_slots ();
3170 /* Mark top block of block_stack as an implicit binding for an
3171 exception region. This is used to prevent infinite recursion when
3172 ending a binding with expand_end_bindings. It is only ever called
3173 by expand_eh_region_start, as that is the only way to create a
3174 block stack for an exception region. */
3176 void
3177 mark_block_as_eh_region ()
3179 block_stack->data.block.exception_region = 1;
3180 if (block_stack->next
3181 && block_stack->next->data.block.conditional_code)
3183 block_stack->data.block.conditional_code
3184 = block_stack->next->data.block.conditional_code;
3185 block_stack->data.block.last_unconditional_cleanup
3186 = block_stack->next->data.block.last_unconditional_cleanup;
3187 block_stack->data.block.cleanup_ptr
3188 = block_stack->next->data.block.cleanup_ptr;
3192 /* True if we are currently emitting insns in an area of output code
3193 that is controlled by a conditional expression. This is used by
3194 the cleanup handling code to generate conditional cleanup actions. */
3197 conditional_context ()
3199 return block_stack && block_stack->data.block.conditional_code;
3202 /* Mark top block of block_stack as not for an implicit binding for an
3203 exception region. This is only ever done by expand_eh_region_end
3204 to let expand_end_bindings know that it is being called explicitly
3205 to end just the binding layer associated with
3206 the exception region; otherwise expand_end_bindings would try to
3207 end all implicit binding layers for exception regions, and then
3208 one normal binding layer. */
3210 void
3211 mark_block_as_not_eh_region ()
3213 block_stack->data.block.exception_region = 0;
3216 /* True if the top block of block_stack was marked as for an exception
3217 region by mark_block_as_eh_region. */
3220 is_eh_region ()
3222 return block_stack && block_stack->data.block.exception_region;
3225 /* Given a pointer to a BLOCK node, save a pointer to the most recently
3226 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3227 BLOCK node. */
3229 void
3230 remember_end_note (block)
3231 register tree block;
3233 BLOCK_END_NOTE (block) = last_block_end_note;
3234 last_block_end_note = NULL_RTX;
3237 /* Generate RTL code to terminate a binding contour.
3238 VARS is the chain of VAR_DECL nodes
3239 for the variables bound in this contour.
3240 MARK_ENDS is nonzero if we should put a note at the beginning
3241 and end of this binding contour.
3243 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3244 (That is true automatically if the contour has a saved stack level.) */
3246 void
3247 expand_end_bindings (vars, mark_ends, dont_jump_in)
3248 tree vars;
3249 int mark_ends;
3250 int dont_jump_in;
3252 register struct nesting *thisblock;
3253 register tree decl;
3255 while (block_stack->data.block.exception_region)
3257 /* Because we don't need or want a new temporary level and
3258 because we didn't create one in expand_eh_region_start,
3259 create a fake one now to avoid removing one in
3260 expand_end_bindings. */
3261 push_temp_slots ();
3263 block_stack->data.block.exception_region = 0;
3265 expand_end_bindings (NULL_TREE, 0, 0);
3268 if (output_bytecode)
3270 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
3271 return;
3274 /* Since expand_eh_region_start does an expand_start_bindings, we
3275 have to first end all the bindings that were created by
3276 expand_eh_region_start. */
3278 thisblock = block_stack;
3280 if (warn_unused)
3281 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3282 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3283 && ! DECL_IN_SYSTEM_HEADER (decl)
3284 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3285 warning_with_decl (decl, "unused variable `%s'");
3287 if (thisblock->exit_label)
3289 do_pending_stack_adjust ();
3290 emit_label (thisblock->exit_label);
3293 /* If necessary, make a handler for nonlocal gotos taking
3294 place in the function calls in this block. */
3295 if (function_call_count != thisblock->data.block.function_call_count
3296 && nonlocal_labels
3297 /* Make handler for outermost block
3298 if there were any nonlocal gotos to this function. */
3299 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3300 /* Make handler for inner block if it has something
3301 special to do when you jump out of it. */
3302 : (thisblock->data.block.cleanups != 0
3303 || thisblock->data.block.stack_level != 0)))
3305 tree link;
3306 rtx afterward = gen_label_rtx ();
3307 rtx handler_label = gen_label_rtx ();
3308 rtx save_receiver = gen_reg_rtx (Pmode);
3309 rtx insns;
3311 /* Don't let jump_optimize delete the handler. */
3312 LABEL_PRESERVE_P (handler_label) = 1;
3314 /* Record the handler address in the stack slot for that purpose,
3315 during this block, saving and restoring the outer value. */
3316 if (thisblock->next != 0)
3318 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
3320 start_sequence ();
3321 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
3322 insns = get_insns ();
3323 end_sequence ();
3324 emit_insns_before (insns, thisblock->data.block.first_insn);
3327 start_sequence ();
3328 emit_move_insn (nonlocal_goto_handler_slot,
3329 gen_rtx (LABEL_REF, Pmode, handler_label));
3330 insns = get_insns ();
3331 end_sequence ();
3332 emit_insns_before (insns, thisblock->data.block.first_insn);
3334 /* Jump around the handler; it runs only when specially invoked. */
3335 emit_jump (afterward);
3336 emit_label (handler_label);
3338 #ifdef HAVE_nonlocal_goto
3339 if (! HAVE_nonlocal_goto)
3340 #endif
3341 /* First adjust our frame pointer to its actual value. It was
3342 previously set to the start of the virtual area corresponding to
3343 the stacked variables when we branched here and now needs to be
3344 adjusted to the actual hardware fp value.
3346 Assignments to virtual registers are converted by
3347 instantiate_virtual_regs into the corresponding assignment
3348 to the underlying register (fp in this case) that makes
3349 the original assignment true.
3350 So the following insn will actually be
3351 decrementing fp by STARTING_FRAME_OFFSET. */
3352 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3354 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3355 if (fixed_regs[ARG_POINTER_REGNUM])
3357 #ifdef ELIMINABLE_REGS
3358 /* If the argument pointer can be eliminated in favor of the
3359 frame pointer, we don't need to restore it. We assume here
3360 that if such an elimination is present, it can always be used.
3361 This is the case on all known machines; if we don't make this
3362 assumption, we do unnecessary saving on many machines. */
3363 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3364 int i;
3366 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3367 if (elim_regs[i].from == ARG_POINTER_REGNUM
3368 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3369 break;
3371 if (i == sizeof elim_regs / sizeof elim_regs [0])
3372 #endif
3374 /* Now restore our arg pointer from the address at which it
3375 was saved in our stack frame.
3376 If there hasn't been space allocated for it yet, make
3377 some now. */
3378 if (arg_pointer_save_area == 0)
3379 arg_pointer_save_area
3380 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3381 emit_move_insn (virtual_incoming_args_rtx,
3382 /* We need a pseudo here, or else
3383 instantiate_virtual_regs_1 complains. */
3384 copy_to_reg (arg_pointer_save_area));
3387 #endif
3389 #ifdef HAVE_nonlocal_goto_receiver
3390 if (HAVE_nonlocal_goto_receiver)
3391 emit_insn (gen_nonlocal_goto_receiver ());
3392 #endif
3394 /* The handler expects the desired label address in the static chain
3395 register. It tests the address and does an appropriate jump
3396 to whatever label is desired. */
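/* Sketch of the emitted dispatcher (illustrative, using GNU C
   label-value notation):
       if (static_chain == &&lab1) goto lab1;
       if (static_chain == &&lab2) goto lab2;
       abort ();  */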
3397 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
3398 /* Skip any labels we shouldn't be able to jump to from here. */
3399 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3401 rtx not_this = gen_label_rtx ();
3402 rtx this = gen_label_rtx ();
3403 do_jump_if_equal (static_chain_rtx,
3404 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
3405 this, 0);
3406 emit_jump (not_this);
3407 emit_label (this);
3408 expand_goto (TREE_VALUE (link));
3409 emit_label (not_this);
3411 /* If label is not recognized, abort. */
3412 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
3413 VOIDmode, 0);
3414 emit_barrier ();
3415 emit_label (afterward);
3418 /* Don't allow jumping into a block that has a stack level.
3419 Cleanups are allowed, though. */
3420 if (dont_jump_in
3421 || thisblock->data.block.stack_level != 0)
3423 struct label_chain *chain;
3425 /* Any labels in this block are no longer valid to go to.
3426 Mark them to cause an error message. */
3427 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3429 DECL_TOO_LATE (chain->label) = 1;
3430 /* If any goto without a fixup came to this label,
3431 that must be an error, because gotos without fixups
3432 come from outside all saved stack-levels. */
3433 if (TREE_ADDRESSABLE (chain->label))
3434 error_with_decl (chain->label,
3435 "label `%s' used before containing binding contour");
3439 /* Restore stack level in effect before the block
3440 (only if variable-size objects allocated). */
3441 /* Perform any cleanups associated with the block. */
3443 if (thisblock->data.block.stack_level != 0
3444 || thisblock->data.block.cleanups != 0)
3446 /* Only clean up here if this point can actually be reached. */
3447 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3449 /* Don't let cleanups affect ({...}) constructs. */
3450 int old_expr_stmts_for_value = expr_stmts_for_value;
3451 rtx old_last_expr_value = last_expr_value;
3452 tree old_last_expr_type = last_expr_type;
3453 expr_stmts_for_value = 0;
3455 /* Do the cleanups. */
3456 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3457 if (reachable)
3458 do_pending_stack_adjust ();
3460 expr_stmts_for_value = old_expr_stmts_for_value;
3461 last_expr_value = old_last_expr_value;
3462 last_expr_type = old_last_expr_type;
3464 /* Restore the stack level. */
3466 if (reachable && thisblock->data.block.stack_level != 0)
3468 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3469 thisblock->data.block.stack_level, NULL_RTX);
3470 if (nonlocal_goto_handler_slot != 0)
3471 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3472 NULL_RTX);
3475 /* Any gotos out of this block must also do these things.
3476 Also report any gotos with fixups that came to labels in this
3477 level. */
3478 fixup_gotos (thisblock,
3479 thisblock->data.block.stack_level,
3480 thisblock->data.block.cleanups,
3481 thisblock->data.block.first_insn,
3482 dont_jump_in);
3485 /* Mark the beginning and end of the scope if requested.
3486 We do this now, after running cleanups on the variables
3487 just going out of scope, so they are in scope for their cleanups. */
3489 if (mark_ends)
3490 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3491 else
3492 /* Get rid of the beginning-mark if we don't make an end-mark. */
3493 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3495 /* If doing stupid register allocation, make sure lives of all
3496 register variables declared here extend thru end of scope. */
3498 if (obey_regdecls)
3499 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3501 rtx rtl = DECL_RTL (decl);
3502 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3503 use_variable (rtl);
3506 /* Restore the temporary level of TARGET_EXPRs. */
3507 target_temp_slot_level = thisblock->data.block.target_temp_slot_level;
3509 /* Restore block_stack level for containing block. */
3511 stack_block_stack = thisblock->data.block.innermost_stack_block;
3512 POPSTACK (block_stack);
3514 /* Pop the stack slot nesting and free any slots at this level. */
3515 pop_temp_slots ();
3519 /* End a binding contour.
3520 VARS is the chain of VAR_DECL nodes for the variables bound
3521 in this contour. MARK_ENDS is nonzero if we should put a note
3522 at the beginning and end of this binding contour.
3523 DONT_JUMP_IN is nonzero if it is not valid to jump into this
3524 contour. */
3526 static void
3527 bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3528 tree vars;
3529 int mark_ends;
3530 int dont_jump_in;
3532 struct nesting *thisbind = nesting_stack;
3533 tree decl;
3535 if (warn_unused)
3536 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3537 if (! TREE_USED (TREE_VALUE (decl)) && TREE_CODE (TREE_VALUE (decl)) == VAR_DECL)
3538 warning_with_decl (decl, "unused variable `%s'");
3540 if (thisbind->exit_label)
3541 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));
3543 /* Pop block/bindings off stack */
3544 POPSTACK (block_stack);
3547 /* Generate RTL for the automatic variable declaration DECL.
3548 (Other kinds of declarations are simply ignored if seen here.) */
3550 void
3551 expand_decl (decl)
3552 register tree decl;
3554 struct nesting *thisblock = block_stack;
3555 tree type;
3557 if (output_bytecode)
3559 bc_expand_decl (decl, 0);
3560 return;
3563 type = TREE_TYPE (decl);
3565 /* Only automatic variables need any expansion done.
3566 Static and external variables, and external functions,
3567 will be handled by `assemble_variable' (called from finish_decl).
3568 TYPE_DECL and CONST_DECL require nothing.
3569 PARM_DECLs are handled in `assign_parms'. */
3571 if (TREE_CODE (decl) != VAR_DECL)
3572 return;
3573 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3574 return;
3576 /* Create the RTL representation for the variable. */
3578 if (type == error_mark_node)
3579 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3580 else if (DECL_SIZE (decl) == 0)
3581 /* Variable with incomplete type. */
3583 if (DECL_INITIAL (decl) == 0)
3584 /* Error message was already done; now avoid a crash. */
3585 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3586 else
3587 /* An initializer is going to decide the size of this array.
3588 Until we know the size, represent its address with a reg. */
3589 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3590 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
3592 else if (DECL_MODE (decl) != BLKmode
3593 /* If -ffloat-store, don't put explicit float vars
3594 into regs. */
3595 && !(flag_float_store
3596 && TREE_CODE (type) == REAL_TYPE)
3597 && ! TREE_THIS_VOLATILE (decl)
3598 && ! TREE_ADDRESSABLE (decl)
3599 && (DECL_REGISTER (decl) || ! obey_regdecls))
3601 /* Automatic variable that can go in a register. */
3602 int unsignedp = TREE_UNSIGNED (type);
3603 enum machine_mode reg_mode
3604 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3606 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3607 mark_user_reg (DECL_RTL (decl));
3609 if (TREE_CODE (type) == POINTER_TYPE)
3610 mark_reg_pointer (DECL_RTL (decl),
3611 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3612 / BITS_PER_UNIT));
3615 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3616 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3617 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3618 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3619 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
3621 /* Variable of fixed size that goes on the stack. */
3622 rtx oldaddr = 0;
3623 rtx addr;
3625 /* If we previously made RTL for this decl, it must be an array
3626 whose size was determined by the initializer.
3627 The old address was a register; set that register now
3628 to the proper address. */
3629 if (DECL_RTL (decl) != 0)
3631 if (GET_CODE (DECL_RTL (decl)) != MEM
3632 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3633 abort ();
3634 oldaddr = XEXP (DECL_RTL (decl), 0);
3637 DECL_RTL (decl)
3638 = assign_stack_temp (DECL_MODE (decl),
3639 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3640 + BITS_PER_UNIT - 1)
3641 / BITS_PER_UNIT),
3642 1);
3643 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3645 /* Set alignment we actually gave this decl. */
3646 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3647 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3649 if (oldaddr)
3651 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3652 if (addr != oldaddr)
3653 emit_move_insn (oldaddr, addr);
3656 /* If this is a memory ref that contains aggregate components,
3657 mark it as such for cse and loop optimize. */
3658 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3659 #if 0
3660 /* If this is in memory because of -ffloat-store,
3661 set the volatile bit, to prevent optimizations from
3662 undoing the effects. */
3663 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3664 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3665 #endif
3667 else
3668 /* Dynamic-size object: must push space on the stack. */
3670 rtx address, size;
3672 /* Record the stack pointer on entry to block, if we have
3673 not already done so. */
3674 if (thisblock->data.block.stack_level == 0)
3676 do_pending_stack_adjust ();
3677 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3678 &thisblock->data.block.stack_level,
3679 thisblock->data.block.first_insn);
3680 stack_block_stack = thisblock;
3683 /* Compute the variable's size, in bytes. */
3684 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3685 DECL_SIZE (decl),
3686 size_int (BITS_PER_UNIT)),
3687 NULL_RTX, VOIDmode, 0);
3688 free_temp_slots ();
3690 /* Allocate space on the stack for the variable. Note that
3691 DECL_ALIGN says how the variable is to be aligned and we
3692 cannot use it to conclude anything about the alignment of
3693 the size. */
3694 address = allocate_dynamic_stack_space (size, NULL_RTX,
3695 TYPE_ALIGN (TREE_TYPE (decl)));
3697 /* Reference the variable indirectly through that rtx. */
3698 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
3700 /* If this is a memory ref that contains aggregate components,
3701 mark it as such for cse and loop optimize. */
3702 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3704 /* Indicate the alignment we actually gave this variable. */
3705 #ifdef STACK_BOUNDARY
3706 DECL_ALIGN (decl) = STACK_BOUNDARY;
3707 #else
3708 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3709 #endif
3712 if (TREE_THIS_VOLATILE (decl))
3713 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3714 #if 0 /* A variable is not necessarily unchanging
3715 just because it is const. RTX_UNCHANGING_P
3716 means no change in the function,
3717 not merely no change in the variable's scope.
3718 It is correct to set RTX_UNCHANGING_P if the variable's scope
3719 is the whole function. There's no convenient way to test that. */
3720 if (TREE_READONLY (decl))
3721 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3722 #endif
3724 /* If doing stupid register allocation, make sure life of any
3725 register variable starts here, at the start of its scope. */
3727 if (obey_regdecls)
3728 use_variable (DECL_RTL (decl));
3732 /* Generate code for the automatic variable declaration DECL. For
3733 most variables this just means we give it a stack offset. The
3734 compiler sometimes emits cleanups without variables and we will
3735 have to deal with those too. */
3737 static void
3738 bc_expand_decl (decl, cleanup)
3739 tree decl;
3740 tree cleanup;
3742 tree type;
3744 if (!decl)
3746 /* A cleanup with no variable. */
3747 if (!cleanup)
3748 abort ();
3750 return;
3753 /* Only auto variables need any work. */
3754 if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3755 return;
3757 type = TREE_TYPE (decl);
3759 if (type == error_mark_node)
3760 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3762 else if (DECL_SIZE (decl) == 0)
3764 /* Variable with incomplete type. The stack offset herein will be
3765 fixed later in expand_decl_init. */
3766 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3768 else if (TREE_CONSTANT (DECL_SIZE (decl)))
3770 DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
3771 DECL_ALIGN (decl));
3773 else
3774 DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
3777 /* Emit code to perform the initialization of a declaration DECL. */
3779 void
3780 expand_decl_init (decl)
3781 tree decl;
3783 int was_used = TREE_USED (decl);
3785 if (output_bytecode)
3787 bc_expand_decl_init (decl);
3788 return;
3791 /* If this is a CONST_DECL, we don't have to generate any code, but
3792 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3793 to be set while in the obstack containing the constant. If we don't
3794 do this, we can lose if we have functions nested three deep and the middle
3795 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3796 the innermost function is the first to expand that STRING_CST. */
3797 if (TREE_CODE (decl) == CONST_DECL)
3799 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3800 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3801 EXPAND_INITIALIZER);
3802 return;
3805 if (TREE_STATIC (decl))
3806 return;
3808 /* Compute and store the initial value now. */
3810 if (DECL_INITIAL (decl) == error_mark_node)
3812 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3813 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3814 || code == POINTER_TYPE)
3815 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3816 0, 0);
3817 emit_queue ();
3819 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3821 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3822 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3823 emit_queue ();
3826 /* Don't let the initialization count as "using" the variable. */
3827 TREE_USED (decl) = was_used;
3829 /* Free any temporaries we made while initializing the decl. */
3830 preserve_temp_slots (NULL_RTX);
3831 free_temp_slots ();
3834 /* Expand initialization for variable-sized types. Allocate array
3835 using newlocalSI and set local variable, which is a pointer to the
3836 storage. */
3838 static void
3839 bc_expand_variable_local_init (decl)
3840 tree decl;
3842 /* Evaluate size expression and coerce to SI */
3843 bc_expand_expr (DECL_SIZE (decl));
3845 /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
3846 no coercion is necessary (?) */
3848 /* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
3849 TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */
3851 /* Emit code to allocate array */
3852 bc_emit_instruction (newlocalSI);
3854 /* Store array pointer in local variable. This is the only instance
3855 where we actually want the address of the pointer to the
3856 variable-size block, rather than the pointer itself. We avoid
3857 using expand_address() since that would cause the pointer to be
3858 pushed rather than its address. Hence the hard-coded reference;
3859 notice also that the variable is always local (no global
3860 variable-size type variables). */
3862 bc_load_localaddr (DECL_RTL (decl));
3863 bc_emit_instruction (storeP);
3867 /* Emit code to initialize a declaration. */
3869 static void
3870 bc_expand_decl_init (decl)
3871 tree decl;
3873 int org_stack_depth;
3875 /* Static initializers are handled elsewhere. */
3877 if (TREE_STATIC (decl))
3878 return;
3880 /* Remember the original stack depth. */
3881 org_stack_depth = stack_depth;
3883 /* If the type is variable-size, we first create its space (we ASSUME
3884 it CAN'T be static). We do this regardless of whether there's an
3885 initializer assignment or not. */
3887 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
3888 bc_expand_variable_local_init (decl);
3890 /* Expand initializer assignment */
3891 if (DECL_INITIAL (decl) == error_mark_node)
3893 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3895 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3896 || code == POINTER_TYPE)
3898 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3900 else if (DECL_INITIAL (decl))
3901 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3903 /* Restore stack depth */
3904 if (org_stack_depth > stack_depth)
3905 abort ();
3907 bc_adjust_stack (stack_depth - org_stack_depth);
3911 /* CLEANUP is an expression to be executed at exit from this binding contour;
3912 for example, in C++, it might call the destructor for this variable.
3914 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3915 CLEANUP multiple times, and have the correct semantics. This
3916 happens in exception handling, and for gotos, returns, and breaks
3917 that leave the current scope.
3919 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3920 that is not associated with any particular variable. */
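/* For instance (C++-style sketch; `String' is a hypothetical class):

       { String s ("hi"); ... }

   the front end passes a call to `s's destructor as CLEANUP, and the
   code below arranges for it to run on every exit from the binding
   contour, conditionalizing it when it is registered inside a
   conditional context.  */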
3922 int
3923 expand_decl_cleanup (decl, cleanup)
3924 tree decl, cleanup;
3926 struct nesting *thisblock = block_stack;
3928 /* Error if we are not in any block. */
3929 if (thisblock == 0)
3930 return 0;
3932 /* Record the cleanup if there is one. */
3934 if (cleanup != 0)
3936 tree t;
3937 rtx seq;
3938 tree *cleanups = &thisblock->data.block.cleanups;
3939 int cond_context = conditional_context ();
3941 if (cond_context)
3943 rtx flag = gen_reg_rtx (word_mode);
3944 rtx set_flag_0;
3945 tree cond;
3947 start_sequence ();
3948 emit_move_insn (flag, const0_rtx);
3949 set_flag_0 = get_insns ();
3950 end_sequence ();
3952 thisblock->data.block.last_unconditional_cleanup
3953 = emit_insns_after (set_flag_0,
3954 thisblock->data.block.last_unconditional_cleanup);
3956 emit_move_insn (flag, const1_rtx);
3958 /* All cleanups must be on the function_obstack. */
3959 push_obstacks_nochange ();
3960 resume_temporary_allocation ();
3962 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3963 DECL_RTL (cond) = flag;
3965 /* Conditionalize the cleanup. */
3966 cleanup = build (COND_EXPR, void_type_node,
3967 truthvalue_conversion (cond),
3968 cleanup, integer_zero_node);
3969 cleanup = fold (cleanup);
3971 pop_obstacks ();
3973 cleanups = thisblock->data.block.cleanup_ptr;
3976 /* All cleanups must be on the function_obstack. */
3977 push_obstacks_nochange ();
3978 resume_temporary_allocation ();
3979 cleanup = unsave_expr (cleanup);
3980 pop_obstacks ();
3982 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
3984 if (! cond_context)
3985 /* If this block has a cleanup, it belongs in stack_block_stack. */
3986 stack_block_stack = thisblock;
3988 if (cond_context)
3990 start_sequence ();
3993 /* If this was optimized so that there is no exception region for the
3994 cleanup, then mark the TREE_LIST node, so that we can later tell
3995 if we need to call expand_eh_region_end. */
3996 if (! using_eh_for_cleanups_p
3997 || expand_eh_region_start_tree (decl, cleanup))
3998 TREE_ADDRESSABLE (t) = 1;
3999 /* If that started a new EH region, we're in a new block. */
4000 thisblock = block_stack;
4002 if (cond_context)
4004 seq = get_insns ();
4005 end_sequence ();
4006 if (seq)
4007 thisblock->data.block.last_unconditional_cleanup
4008 = emit_insns_after (seq,
4009 thisblock->data.block.last_unconditional_cleanup);
4011 else
4013 thisblock->data.block.last_unconditional_cleanup
4014 = get_last_insn ();
4015 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4018 return 1;
4021 /* Like expand_decl_cleanup, but suppress generating an exception handler
4022 to perform the cleanup. */
4024 int
4025 expand_decl_cleanup_no_eh (decl, cleanup)
4026 tree decl, cleanup;
4028 int save_eh = using_eh_for_cleanups_p;
4029 using_eh_for_cleanups_p = 0;
4030 expand_decl_cleanup (decl, cleanup);
4031 using_eh_for_cleanups_p = save_eh;
4034 /* Arrange for the top element of the dynamic cleanup chain to be
4035 popped if we exit the current binding contour. DECL is the
4036 associated declaration, if any, otherwise NULL_TREE. If the
4037 current contour is left via an exception, then __sjthrow will pop
4038 the top element off the dynamic cleanup chain. The code that
4039 avoids doing the action we push into the cleanup chain in the
4040 exceptional case is contained in expand_cleanups.
4042 This routine is only used by expand_eh_region_start, and that is
4043 the only way in which an exception region should be started. This
4044 routine is only used when using the setjmp/longjmp codegen method
4045 for exception handling. */
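/* Illustrative picture only (not the real runtime layout): the chain
   behaves like a stack of pending actions,

       dcc -> [ newest entry ] -> [ older entries ] -> ...

   The POPDCC_EXPR recorded below pops the newest entry on normal exit,
   while __sjthrow pops it when the contour is left via an exception.  */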
4047 int
4048 expand_dcc_cleanup (decl)
4049 tree decl;
4051 struct nesting *thisblock = block_stack;
4052 tree cleanup;
4054 /* Error if we are not in any block. */
4055 if (thisblock == 0)
4056 return 0;
4058 /* Record the cleanup for the dynamic handler chain. */
4060 /* All cleanups must be on the function_obstack. */
4061 push_obstacks_nochange ();
4062 resume_temporary_allocation ();
4063 cleanup = make_node (POPDCC_EXPR);
4064 pop_obstacks ();
4066 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4067 thisblock->data.block.cleanups
4068 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4070 /* If this block has a cleanup, it belongs in stack_block_stack. */
4071 stack_block_stack = thisblock;
4072 return 1;
4075 /* Arrange for the top element of the dynamic handler chain to be
4076 popped if we exit the current binding contour. DECL is the
4077 associated declaration, if any, otherwise NULL_TREE. If the current
4078 contour is left via an exception, then __sjthrow will pop the top
4079 element off the dynamic handler chain. The code that avoids doing
4080 the action we push into the handler chain in the exceptional case
4081 is contained in expand_cleanups.
4083 This routine is only used by expand_eh_region_start, and that is
4084 the only way in which an exception region should be started. This
4085 routine is only used when using the setjmp/longjmp codegen method
4086 for exception handling. */
4088 int
4089 expand_dhc_cleanup (decl)
4090 tree decl;
4092 struct nesting *thisblock = block_stack;
4093 tree cleanup;
4095 /* Error if we are not in any block. */
4096 if (thisblock == 0)
4097 return 0;
4099 /* Record the cleanup for the dynamic handler chain. */
4101 /* All cleanups must be on the function_obstack. */
4102 push_obstacks_nochange ();
4103 resume_temporary_allocation ();
4104 cleanup = make_node (POPDHC_EXPR);
4105 pop_obstacks ();
4107 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4108 thisblock->data.block.cleanups
4109 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4111 /* If this block has a cleanup, it belongs in stack_block_stack. */
4112 stack_block_stack = thisblock;
4113 return 1;
4116 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4117 DECL_ELTS is the list of elements that belong to DECL's type.
4118 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
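/* For example (illustrative C++-style fragment):

       union { int i; float f; };

   Here DECL is the compiler-generated variable for the whole union and
   DECL_ELTS lists `i' and `f'; each element below shares DECL's rtl,
   adjusted to its own machine mode.  */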
4120 void
4121 expand_anon_union_decl (decl, cleanup, decl_elts)
4122 tree decl, cleanup, decl_elts;
4124 struct nesting *thisblock = block_stack;
4125 rtx x;
4127 expand_decl (decl);
4128 expand_decl_cleanup (decl, cleanup);
4129 x = DECL_RTL (decl);
4131 while (decl_elts)
4133 tree decl_elt = TREE_VALUE (decl_elts);
4134 tree cleanup_elt = TREE_PURPOSE (decl_elts);
4135 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4137 /* Propagate the union's alignment to the elements. */
4138 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4140 /* If the element has BLKmode and the union doesn't, the union is
4141 aligned such that the element doesn't need to have BLKmode, so
4142 change the element's mode to the appropriate one for its size. */
4143 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4144 DECL_MODE (decl_elt) = mode
4145 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
4146 MODE_INT, 1);
4148 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4149 instead create a new MEM rtx with the proper mode. */
4150 if (GET_CODE (x) == MEM)
4152 if (mode == GET_MODE (x))
4153 DECL_RTL (decl_elt) = x;
4154 else
4156 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
4157 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
4158 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
4161 else if (GET_CODE (x) == REG)
4163 if (mode == GET_MODE (x))
4164 DECL_RTL (decl_elt) = x;
4165 else
4166 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
4168 else
4169 abort ();
4171 /* Record the cleanup if there is one. */
4173 if (cleanup != 0)
4174 thisblock->data.block.cleanups
4175 = temp_tree_cons (decl_elt, cleanup_elt,
4176 thisblock->data.block.cleanups);
4178 decl_elts = TREE_CHAIN (decl_elts);
4182 /* Expand a list of cleanups LIST.
4183 Elements may be expressions or may be nested lists.
4185 If DONT_DO is nonnull, then any list-element
4186 whose TREE_PURPOSE matches DONT_DO is omitted.
4187 This is sometimes used to avoid a cleanup associated with
4188 a value that is being returned out of the scope.
4190 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4191 goto and handle protection regions specially in that case.
4193 If REACHABLE, we emit code; otherwise we just inform the exception
4194 handling code about this finalization. */
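/* Sketch of the multiple-expansion situation described below (`Obj'
   is a hypothetical type with a cleanup):

       { Obj a; if (p) goto out; }
     out:

   The cleanup for `a' is expanded once on the goto's exit path and
   once at the normal end of the block; the two copies lie on
   non-overlapping control paths, so it still runs exactly once.  */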
4196 static void
4197 expand_cleanups (list, dont_do, in_fixup, reachable)
4198 tree list;
4199 tree dont_do;
4200 int in_fixup;
4201 int reachable;
4203 tree tail;
4204 for (tail = list; tail; tail = TREE_CHAIN (tail))
4205 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4207 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4208 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4209 else
4211 if (! in_fixup)
4213 tree cleanup = TREE_VALUE (tail);
4215 /* See expand_d{h,c}c_cleanup for why we avoid this. */
4216 if (TREE_CODE (cleanup) != POPDHC_EXPR
4217 && TREE_CODE (cleanup) != POPDCC_EXPR
4218 /* See expand_eh_region_start_tree for this case. */
4219 && ! TREE_ADDRESSABLE (tail))
4221 cleanup = protect_with_terminate (cleanup);
4222 expand_eh_region_end (cleanup);
4226 if (reachable)
4228 /* Cleanups may be run multiple times. For example,
4229 when exiting a binding contour, we expand the
4230 cleanups associated with that contour. When a goto
4231 within that binding contour has a target outside that
4232 contour, it will expand all cleanups from its scope to
4233 the target. Though the cleanups are expanded multiple
4234 times, the control paths are non-overlapping so the
4235 cleanups will not be executed twice. */
4236 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4237 free_temp_slots ();
4243 /* Mark the context we are emitting RTL for as a conditional
4244 context, so that any cleanup actions we register with
4245 expand_decl_init will be properly conditionalized when those
4246 cleanup actions are later performed. Must be called before any
4247 expression (tree) is expanded that is within a conditional context. */
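/* E.g. (illustrative sketch): while expanding

       x = p ? f () : g ();

   each arm would be bracketed by start_cleanup_deferral and
   end_cleanup_deferral, so a cleanup registered inside an arm gets the
   flag treatment in expand_decl_cleanup above instead of running
   unconditionally.  */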
4249 void
4250 start_cleanup_deferral ()
4252 /* block_stack can be NULL if we are inside the parameter list. It is
4253 OK to do nothing, because cleanups aren't possible here. */
4254 if (block_stack)
4255 ++block_stack->data.block.conditional_code;
4258 /* Mark the end of a conditional region of code. Because cleanup
4259 deferrals may be nested, we may still be in a conditional region
4260 after we end the currently deferred cleanups; only after we end all
4261 deferred cleanups are we back in unconditional code.
4263 void
4264 end_cleanup_deferral ()
4266 /* block_stack can be NULL if we are inside the parameter list. It is
4267 OK to do nothing, because cleanups aren't possible here. */
4268 if (block_stack)
4269 --block_stack->data.block.conditional_code;
4272 /* Move all cleanups from the current block_stack
4273 to the containing block_stack, where they are assumed to
4274 have been created. If anything can cause a temporary to
4275 be created, but not expanded for more than one level of
4276 block_stacks, then this code will have to change. */
4278 void
4279 move_cleanups_up ()
4281 struct nesting *block = block_stack;
4282 struct nesting *outer = block->next;
4284 outer->data.block.cleanups
4285 = chainon (block->data.block.cleanups,
4286 outer->data.block.cleanups);
4287 block->data.block.cleanups = 0;
4290 tree
4291 last_cleanup_this_contour ()
4293 if (block_stack == 0)
4294 return 0;
4296 return block_stack->data.block.cleanups;
4299 /* Return 1 if there are any pending cleanups at this point.
4300 If THIS_CONTOUR is nonzero, check the current contour as well.
4301 Otherwise, look only at the contours that enclose this one. */
4303 int
4304 any_pending_cleanups (this_contour)
4305 int this_contour;
4307 struct nesting *block;
4309 if (block_stack == 0)
4310 return 0;
4312 if (this_contour && block_stack->data.block.cleanups != NULL)
4313 return 1;
4314 if (block_stack->data.block.cleanups == 0
4315 && block_stack->data.block.outer_cleanups == 0)
4316 return 0;
4318 for (block = block_stack->next; block; block = block->next)
4319 if (block->data.block.cleanups != 0)
4320 return 1;
4322 return 0;
4325 /* Enter a case (Pascal) or switch (C) statement.
4326 Push a block onto case_stack and nesting_stack
4327 to accumulate the case-labels that are seen
4328 and to record the labels generated for the statement.
4330 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4331 Otherwise, this construct is transparent for `exit_something'.
4333 EXPR is the index-expression to be dispatched on.
4334 TYPE is its nominal type. We could simply convert EXPR to this type,
4335 but instead we take short cuts. */
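/* The calls made by a front end for a C `switch' look roughly like
   this (illustrative sketch):

       expand_start_case (1, expr, type, "switch statement");
       ... pushcase / pushcase_range for each label, expanding the
           statements in between ...
       expand_end_case (expr);
   */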
4337 void
4338 expand_start_case (exit_flag, expr, type, printname)
4339 int exit_flag;
4340 tree expr;
4341 tree type;
4342 char *printname;
4344 register struct nesting *thiscase = ALLOC_NESTING ();
4346 /* Make an entry on case_stack for the case we are entering. */
4348 thiscase->next = case_stack;
4349 thiscase->all = nesting_stack;
4350 thiscase->depth = ++nesting_depth;
4351 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4352 thiscase->data.case_stmt.case_list = 0;
4353 thiscase->data.case_stmt.index_expr = expr;
4354 thiscase->data.case_stmt.nominal_type = type;
4355 thiscase->data.case_stmt.default_label = 0;
4356 thiscase->data.case_stmt.num_ranges = 0;
4357 thiscase->data.case_stmt.printname = printname;
4358 thiscase->data.case_stmt.seenlabel = 0;
4359 case_stack = thiscase;
4360 nesting_stack = thiscase;
4362 if (output_bytecode)
4364 bc_expand_start_case (thiscase, expr, type, printname);
4365 return;
4368 do_pending_stack_adjust ();
4370 /* Make sure case_stmt.start points to something that won't
4371 need any transformation before expand_end_case. */
4372 if (GET_CODE (get_last_insn ()) != NOTE)
4373 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4375 thiscase->data.case_stmt.start = get_last_insn ();
4377 start_cleanup_deferral ();
4381 /* Enter a case statement. It is assumed that the caller has pushed
4382 the current context onto the case stack. */
4384 static void
4385 bc_expand_start_case (thiscase, expr, type, printname)
4386 struct nesting *thiscase;
4387 tree expr;
4388 tree type;
4389 char *printname;
4391 bc_expand_expr (expr);
4392 bc_expand_conversion (TREE_TYPE (expr), type);
4394 /* For cases, the skip is a place we jump to that's emitted after
4395 the size of the jump table is known. */
4397 thiscase->data.case_stmt.skip_label = gen_label_rtx ();
4398 bc_emit_bytecode (jump);
4399 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
4401 #ifdef DEBUG_PRINT_CODE
4402 fputc ('\n', stderr);
4403 #endif
4407 /* Start a "dummy case statement" within which case labels are invalid
4408 and are not connected to any larger real case statement.
4409 This can be used if you don't want to let a case statement jump
4410 into the middle of certain kinds of constructs. */
4412 void
4413 expand_start_case_dummy ()
4415 register struct nesting *thiscase = ALLOC_NESTING ();
4417 /* Make an entry on case_stack for the dummy. */
4419 thiscase->next = case_stack;
4420 thiscase->all = nesting_stack;
4421 thiscase->depth = ++nesting_depth;
4422 thiscase->exit_label = 0;
4423 thiscase->data.case_stmt.case_list = 0;
4424 thiscase->data.case_stmt.start = 0;
4425 thiscase->data.case_stmt.nominal_type = 0;
4426 thiscase->data.case_stmt.default_label = 0;
4427 thiscase->data.case_stmt.num_ranges = 0;
4428 case_stack = thiscase;
4429 nesting_stack = thiscase;
4430 start_cleanup_deferral ();
4433 /* End a dummy case statement. */
4435 void
4436 expand_end_case_dummy ()
4438 end_cleanup_deferral ();
4439 POPSTACK (case_stack);
4442 /* Return the data type of the index-expression
4443 of the innermost case statement, or null if none. */
4445 tree
4446 case_index_expr_type ()
4448 if (case_stack)
4449 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4450 return 0;
4453 /* Accumulate one case or default label inside a case or switch statement.
4454 VALUE is the value of the case (a null pointer, for a default label).
4455 The function CONVERTER, when applied to arguments T and V,
4456 converts the value V to the type T.
4458 If not currently inside a case or switch statement, return 1 and do
4459 nothing. The caller will print a language-specific error message.
4460 If VALUE is a duplicate or overlaps, return 2 and do nothing
4461 except store the (first) duplicate node in *DUPLICATE.
4462 If VALUE is out of range, return 3 and do nothing.
4463 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4464 Return 0 on success.
4466 Extended to handle range statements. */
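/* A caller typically maps the return codes to diagnostics, roughly
   (illustrative sketch; the real callers live in the language front
   ends):

       int code = pushcase (value, converter, label, &duplicate);
       if (code == 1)
         error ("case label not within a switch statement");
       else if (code == 2)
         error ("duplicate case value");
   */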
4468 int
4469 pushcase (value, converter, label, duplicate)
4470 register tree value;
4471 tree (*converter) PROTO((tree, tree));
4472 register tree label;
4473 tree *duplicate;
4475 register struct case_node **l;
4476 register struct case_node *n;
4477 tree index_type;
4478 tree nominal_type;
4480 if (output_bytecode)
4481 return bc_pushcase (value, label);
4483 /* Fail if not inside a real case statement. */
4484 if (! (case_stack && case_stack->data.case_stmt.start))
4485 return 1;
4487 if (stack_block_stack
4488 && stack_block_stack->depth > case_stack->depth)
4489 return 5;
4491 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4492 nominal_type = case_stack->data.case_stmt.nominal_type;
4494 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4495 if (index_type == error_mark_node)
4496 return 0;
4498 /* Convert VALUE to the type in which the comparisons are nominally done. */
4499 if (value != 0)
4500 value = (*converter) (nominal_type, value);
4502 /* If this is the first label, warn if any insns have been emitted. */
4503 if (case_stack->data.case_stmt.seenlabel == 0)
4505 rtx insn;
4506 for (insn = case_stack->data.case_stmt.start;
4507 insn;
4508 insn = NEXT_INSN (insn))
4510 if (GET_CODE (insn) == CODE_LABEL)
4511 break;
4512 if (GET_CODE (insn) != NOTE
4513 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4515 warning ("unreachable code at beginning of %s",
4516 case_stack->data.case_stmt.printname);
4517 break;
4521 case_stack->data.case_stmt.seenlabel = 1;
4523 /* Fail if this value is out of range for the actual type of the index
4524 (which may be narrower than NOMINAL_TYPE). */
4525 if (value != 0 && ! int_fits_type_p (value, index_type))
4526 return 3;
4528 /* Fail if this is a duplicate or overlaps another entry. */
4529 if (value == 0)
4531 if (case_stack->data.case_stmt.default_label != 0)
4533 *duplicate = case_stack->data.case_stmt.default_label;
4534 return 2;
4536 case_stack->data.case_stmt.default_label = label;
4538 else
4539 return add_case_node (value, value, label, duplicate);
4541 expand_label (label);
4542 return 0;
4545 /* Like pushcase but this case applies to all values between VALUE1 and
4546 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4547 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4548 starts at VALUE1 and ends at the highest value of the index type.
4549 If both are NULL, this case applies to all values.
4551 The return value is the same as that of pushcase but there is one
4552 additional error code: 4 means the specified range was empty. */
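/* This serves the GNU C case-range extension, e.g. (illustrative):

       switch (c)
         {
         case 'a' ... 'z':
           ...
         }
   */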
4554 int
4555 pushcase_range (value1, value2, converter, label, duplicate)
4556 register tree value1, value2;
4557 tree (*converter) PROTO((tree, tree));
4558 register tree label;
4559 tree *duplicate;
4561 register struct case_node **l;
4562 register struct case_node *n;
4563 tree index_type;
4564 tree nominal_type;
4566 /* Fail if not inside a real case statement. */
4567 if (! (case_stack && case_stack->data.case_stmt.start))
4568 return 1;
4570 if (stack_block_stack
4571 && stack_block_stack->depth > case_stack->depth)
4572 return 5;
4574 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4575 nominal_type = case_stack->data.case_stmt.nominal_type;
4577 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4578 if (index_type == error_mark_node)
4579 return 0;
4581 /* If this is the first label, warn if any insns have been emitted. */
4582 if (case_stack->data.case_stmt.seenlabel == 0)
4584 rtx insn;
4585 for (insn = case_stack->data.case_stmt.start;
4586 insn;
4587 insn = NEXT_INSN (insn))
4589 if (GET_CODE (insn) == CODE_LABEL)
4590 break;
4591 if (GET_CODE (insn) != NOTE
4592 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4594 warning ("unreachable code at beginning of %s",
4595 case_stack->data.case_stmt.printname);
4596 break;
4600 case_stack->data.case_stmt.seenlabel = 1;
4602 /* Convert VALUEs to type in which the comparisons are nominally done
4603 and replace any unspecified value with the corresponding bound. */
4604 if (value1 == 0)
4605 value1 = TYPE_MIN_VALUE (index_type);
4606 if (value2 == 0)
4607 value2 = TYPE_MAX_VALUE (index_type);
4609 /* Fail if the range is empty. Do this before any conversion since
4610 we want to allow out-of-range empty ranges. */
4611 if (tree_int_cst_lt (value2, value1))
4612 return 4;
4614 value1 = (*converter) (nominal_type, value1);
4615 value2 = (*converter) (nominal_type, value2);
4617 /* Fail if these values are out of range. */
4618 if (TREE_CONSTANT_OVERFLOW (value1)
4619 || ! int_fits_type_p (value1, index_type))
4620 return 3;
4622 if (TREE_CONSTANT_OVERFLOW (value2)
4623 || ! int_fits_type_p (value2, index_type))
4624 return 3;
4626 return add_case_node (value1, value2, label, duplicate);
4629 /* Do the actual insertion of a case label for pushcase and pushcase_range
4630 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4631 slowdown for large switch statements. */
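/* Sketch of the single R-rotation performed below when P's left
   subtree becomes too tall (lowercase letters are subtrees):

           P                 R
          / \               / \
         R   c     ==>     a   P
        / \                   / \
       a   b                 b   c
   */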
4633 static int
4634 add_case_node (low, high, label, duplicate)
4635 tree low, high;
4636 tree label;
4637 tree *duplicate;
4639 struct case_node *p, **q, *r;
4641 q = &case_stack->data.case_stmt.case_list;
4642 p = *q;
4644 while (r = *q)
4646 p = r;
4648 /* Keep going past elements distinctly greater than HIGH. */
4649 if (tree_int_cst_lt (high, p->low))
4650 q = &p->left;
4652 /* or distinctly less than LOW. */
4653 else if (tree_int_cst_lt (p->high, low))
4654 q = &p->right;
4656 else
4658 /* We have an overlap; this is an error. */
4659 *duplicate = p->code_label;
4660 return 2;
4664 /* Add this label to the chain, and succeed.
4665 Copy LOW, HIGH so they are on temporary rather than momentary
4666 obstack and will thus survive till the end of the case statement. */
4668 r = (struct case_node *) oballoc (sizeof (struct case_node));
4669 r->low = copy_node (low);
4671 /* If the bounds are equal, turn this into the one-value case. */
4673 if (tree_int_cst_equal (low, high))
4674 r->high = r->low;
4675 else
4677 r->high = copy_node (high);
4678 case_stack->data.case_stmt.num_ranges++;
4681 r->code_label = label;
4682 expand_label (label);
4684 *q = r;
4685 r->parent = p;
4686 r->left = 0;
4687 r->right = 0;
4688 r->balance = 0;
4690 while (p)
4692 struct case_node *s;
4694 if (r == p->left)
4696 int b;
4698 if (! (b = p->balance))
4699 /* Growth propagation from left side. */
4700 p->balance = -1;
4701 else if (b < 0)
4703 if (r->balance < 0)
4705 /* R-Rotation */
4706 if (p->left = s = r->right)
4707 s->parent = p;
4709 r->right = p;
4710 p->balance = 0;
4711 r->balance = 0;
4712 s = p->parent;
4713 p->parent = r;
4715 if (r->parent = s)
4717 if (s->left == p)
4718 s->left = r;
4719 else
4720 s->right = r;
4722 else
4723 case_stack->data.case_stmt.case_list = r;
4725 else
4726 /* r->balance == +1 */
4728 /* LR-Rotation */
4730 int b2;
4731 struct case_node *t = r->right;
4733 if (p->left = s = t->right)
4734 s->parent = p;
4736 t->right = p;
4737 if (r->right = s = t->left)
4738 s->parent = r;
4740 t->left = r;
4741 b = t->balance;
4742 b2 = b < 0;
4743 p->balance = b2;
4744 b2 = -b2 - b;
4745 r->balance = b2;
4746 t->balance = 0;
4747 s = p->parent;
4748 p->parent = t;
4749 r->parent = t;
4751 if (t->parent = s)
4753 if (s->left == p)
4754 s->left = t;
4755 else
4756 s->right = t;
4758 else
4759 case_stack->data.case_stmt.case_list = t;
4761 break;
4764 else
4766 /* p->balance == +1; growth of left side balances the node. */
4767 p->balance = 0;
4768 break;
4771 else
4772 /* r == p->right */
4774 int b;
4776 if (! (b = p->balance))
4777 /* Growth propagation from right side. */
4778 p->balance++;
4779 else if (b > 0)
4781 if (r->balance > 0)
4783 /* L-Rotation */
4785 if (p->right = s = r->left)
4786 s->parent = p;
4788 r->left = p;
4789 p->balance = 0;
4790 r->balance = 0;
4791 s = p->parent;
4792 p->parent = r;
4793 if (r->parent = s)
4795 if (s->left == p)
4796 s->left = r;
4797 else
4798 s->right = r;
4801 else
4802 case_stack->data.case_stmt.case_list = r;
4805 else
4806 /* r->balance == -1 */
4808 /* RL-Rotation */
4809 int b2;
4810 struct case_node *t = r->left;
4812 if (p->right = s = t->left)
4813 s->parent = p;
4815 t->left = p;
4817 if (r->left = s = t->right)
4818 s->parent = r;
4820 t->right = r;
4821 b = t->balance;
4822 b2 = b < 0;
4823 r->balance = b2;
4824 b2 = -b2 - b;
4825 p->balance = b2;
4826 t->balance = 0;
4827 s = p->parent;
4828 p->parent = t;
4829 r->parent = t;
4831 if (t->parent = s)
4833 if (s->left == p)
4834 s->left = t;
4835 else
4836 s->right = t;
4839 else
4840 case_stack->data.case_stmt.case_list = t;
4842 break;
4844 else
4846 /* p->balance == -1; growth of right side balances the node. */
4847 p->balance = 0;
4848 break;
4852 r = p;
4853 p = p->parent;
4856 return 0;
4859 /* Accumulate one case or default label; VALUE is the value of the
4860 case, or nil for a default label. If not currently inside a case,
4861 return 1 and do nothing. If VALUE is a duplicate or overlaps, return
4862 2 and do nothing. If VALUE is out of range, return 3 and do nothing.
4863 Return 0 on success. This function is a leftover from the earlier
4864 bytecode compiler, which was based on gcc 1.37. It should be
4865 merged into pushcase. */
4867 static int
4868 bc_pushcase (value, label)
4869 tree value;
4870 tree label;
4872 struct nesting *thiscase = case_stack;
4873 struct case_node *case_label, *new_label;
4875 if (! thiscase)
4876 return 1;
4878 /* Fail if duplicate, overlap, or out of type range. */
4879 if (value)
4881 value = convert (thiscase->data.case_stmt.nominal_type, value);
4882 if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
4883 return 3;
4885 for (case_label = thiscase->data.case_stmt.case_list;
4886 case_label->left; case_label = case_label->left)
4887 if (! tree_int_cst_lt (case_label->left->high, value))
4888 break;
4890 if (case_label != thiscase->data.case_stmt.case_list
4891 && ! tree_int_cst_lt (case_label->high, value)
4892 || (case_label->left && ! tree_int_cst_lt (value, case_label->left->low)))
4893 return 2;
4895 new_label = (struct case_node *) oballoc (sizeof (struct case_node));
4896 new_label->low = new_label->high = copy_node (value);
4897 new_label->code_label = label;
4898 new_label->left = case_label->left;
4900 case_label->left = new_label;
4901 thiscase->data.case_stmt.num_ranges++;
4903 else
4905 if (thiscase->data.case_stmt.default_label)
4906 return 2;
4907 thiscase->data.case_stmt.default_label = label;
4910 expand_label (label);
4911 return 0;
4914 /* Returns the number of possible values of TYPE.
4915 Returns -1 if the number is unknown or variable.
4916 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4917 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4918 do not increase monotonically (there may be duplicates);
4919 to 1 if the values increase monotonically, but not always by 1;
4920 otherwise sets it to 0. */
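/* Examples (illustrative): for `enum e { A, B, C };' this returns 3
   and sets *SPARSENESS to 0; for `enum e { A = 1, B = 4 };' it
   returns 2 and sets *SPARSENESS to 1, since the values do not
   increase by exactly 1.  */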
4922 HOST_WIDE_INT
4923 all_cases_count (type, spareness)
4924 tree type;
4925 int *spareness;
4927 HOST_WIDE_INT count, count_high = 0;
4928 *spareness = 0;
4930 switch (TREE_CODE (type))
4932 tree t;
4933 case BOOLEAN_TYPE:
4934 count = 2;
4935 break;
4936 case CHAR_TYPE:
4937 count = 1 << BITS_PER_UNIT;
4938 break;
4939 default:
4940 case INTEGER_TYPE:
4941 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4942 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4943 return -1;
4944 else
4946 /* count
4947 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4948 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4949 but with overflow checking. */
4950 tree mint = TYPE_MIN_VALUE (type);
4951 tree maxt = TYPE_MAX_VALUE (type);
4952 HOST_WIDE_INT lo, hi;
4953 neg_double(TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4954 &lo, &hi);
4955 add_double(TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4956 lo, hi, &lo, &hi);
4957 add_double (lo, hi, 1, 0, &lo, &hi);
4958 if (hi != 0 || lo < 0)
4959 return -2;
4960 count = lo;
4962 break;
4963 case ENUMERAL_TYPE:
4964 count = 0;
4965 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4967 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4968 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4969 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4970 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4971 *spareness = 1;
4972 count++;
4974 if (*spareness == 1)
4976 tree prev = TREE_VALUE (TYPE_VALUES (type));
4977 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4979 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4981 *spareness = 2;
4982 break;
4984 prev = TREE_VALUE (t);
4989 return count;
4993 #define BITARRAY_TEST(ARRAY, INDEX) \
4994 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4995 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4996 #define BITARRAY_SET(ARRAY, INDEX) \
4997 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4998 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
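/* Usage sketch: BITARRAY_SET (cases_seen, i) records that the case
   with offset `i' was matched, and BITARRAY_TEST (cases_seen, i) is
   then nonzero for exactly those offsets.  */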
5000 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT)
5001 according to the case values we have seen, assuming the case expression
5002 has the given TYPE.
5003 SPARSENESS is as determined by all_cases_count.
5005 The time needed is proportional to COUNT, unless
5006 SPARSENESS is 2, in which case quadratic time is needed. */
5008 void
5009 mark_seen_cases (type, cases_seen, count, sparseness)
5010 tree type;
5011 unsigned char *cases_seen;
5012 long count;
5013 int sparseness;
5015 long i;
5017 tree next_node_to_try = NULL_TREE;
5018 long next_node_offset = 0;
5020 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
5021 tree val = make_node (INTEGER_CST);
5022 TREE_TYPE (val) = type;
5023 if (! root)
5024 ; /* Do nothing */
5025 else if (sparseness == 2)
5027 tree t;
5028 HOST_WIDE_INT xlo;
5030 /* This less efficient loop is only needed to handle
5031 duplicate case values (multiple enum constants
5032 with the same value). */
5033 TREE_TYPE (val) = TREE_TYPE (root->low);
5034 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5035 t = TREE_CHAIN (t), xlo++)
5037 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
5038 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
5039 n = root;
5042 /* Keep going past elements distinctly greater than VAL. */
5043 if (tree_int_cst_lt (val, n->low))
5044 n = n->left;
5046 /* or distinctly less than VAL. */
5047 else if (tree_int_cst_lt (n->high, val))
5048 n = n->right;
5050 else
5052 /* We have found a matching range. */
5053 BITARRAY_SET (cases_seen, xlo);
5054 break;
5057 while (n);
5060 else
5062 if (root->left)
5063 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5064 for (n = root; n; n = n->right)
5066 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5067 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5068 while ( ! tree_int_cst_lt (n->high, val))
5070 /* Calculate (into xlo) the "offset" of the integer (val).
5071 The element with lowest value has offset 0, the next smallest
5072 element has offset 1, etc. */
5074 HOST_WIDE_INT xlo, xhi;
5075 tree t;
5076 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5078 /* The TYPE_VALUES will be in increasing order, so
5079 start searching where we last ended. */
5080 t = next_node_to_try;
5081 xlo = next_node_offset;
5082 xhi = 0;
5083 for (;;)
5085 if (t == NULL_TREE)
5087 t = TYPE_VALUES (type);
5088 xlo = 0;
5090 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5092 next_node_to_try = TREE_CHAIN (t);
5093 next_node_offset = xlo + 1;
5094 break;
5096 xlo++;
5097 t = TREE_CHAIN (t);
5098 if (t == next_node_to_try)
5100 xlo = -1;
5101 break;
5105 else
5107 t = TYPE_MIN_VALUE (type);
5108 if (t)
5109 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5110 &xlo, &xhi);
5111 else
5112 xlo = xhi = 0;
5113 add_double (xlo, xhi,
5114 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5115 &xlo, &xhi);
5118 if (xhi == 0 && xlo >= 0 && xlo < count)
5119 BITARRAY_SET (cases_seen, xlo);
5120 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5121 1, 0,
5122 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5128 /* Called when the index of a switch statement is an enumerated type
5129 and there is no default label.
5131 Checks that all enumeration literals are covered by the case
5132 expressions of a switch. Also, warn if there are any extra
5133 switch cases that are *not* elements of the enumerated type.
5135 If all enumeration literals were covered by the case expressions,
5136 turn one of the expressions into the default expression since it should
5137 not be possible to fall through such a switch. */
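/* For example (illustrative), compiling with -Wswitch:

       enum e { A, B, C } v;
       switch (v) { case A: case B: break; }

   produces "enumeration value `C' not handled in switch".  */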
5139 void
5140 check_for_full_enumeration_handling (type)
5141 tree type;
5143 register struct case_node *n;
5144 register struct case_node **l;
5145 register tree chain;
5146 int all_values = 1;
5148 /* True iff the selector type is a numbered set mode. */
5149 int sparseness = 0;
5151 /* The number of possible selector values. */
5152 HOST_WIDE_INT size;
5154 /* For each possible selector value, a one iff it has been matched
5155 by a case value alternative. */
5156 unsigned char *cases_seen;
5158 /* The allocated size of cases_seen, in chars. */
5159 long bytes_needed;
5160 tree t;
5162 if (output_bytecode)
5164 bc_check_for_full_enumeration_handling (type);
5165 return;
5168 if (! warn_switch)
5169 return;
5171 size = all_cases_count (type, &sparseness);
5172 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5174 if (size > 0 && size < 600000
5175 /* We deliberately use malloc here - not xmalloc. */
5176 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
5178 long i;
5179 tree v = TYPE_VALUES (type);
5180 bzero (cases_seen, bytes_needed);
5182 /* The time complexity of this code is normally O(N), where
5183 N is the number of members in the enumerated type.
5184 However, if type is an ENUMERAL_TYPE whose values do not
5185 increase monotonically, O(N*log(N)) time may be needed. */
5187 mark_seen_cases (type, cases_seen, size, sparseness);
5189 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5191 if (BITARRAY_TEST(cases_seen, i) == 0)
5192 warning ("enumeration value `%s' not handled in switch",
5193 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5196 free (cases_seen);
5199 /* Now we go the other way around; we warn if there are case
5200 expressions that don't correspond to enumerators. This can
5201 occur since C and C++ don't enforce type-checking of
5202 assignments to enumeration variables. */
5204 if (case_stack->data.case_stmt.case_list
5205 && case_stack->data.case_stmt.case_list->left)
5206 case_stack->data.case_stmt.case_list
5207 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5208 if (warn_switch)
5209 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5211 for (chain = TYPE_VALUES (type);
5212 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5213 chain = TREE_CHAIN (chain))
5216 if (!chain)
5218 if (TYPE_NAME (type) == 0)
5219 warning ("case value `%d' not in enumerated type",
5220 TREE_INT_CST_LOW (n->low));
5221 else
5222 warning ("case value `%d' not in enumerated type `%s'",
5223 TREE_INT_CST_LOW (n->low),
5224 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5225 == IDENTIFIER_NODE)
5226 ? TYPE_NAME (type)
5227 : DECL_NAME (TYPE_NAME (type))));
5229 if (!tree_int_cst_equal (n->low, n->high))
5231 for (chain = TYPE_VALUES (type);
5232 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5233 chain = TREE_CHAIN (chain))
5236 if (!chain)
5238 if (TYPE_NAME (type) == 0)
5239 warning ("case value `%d' not in enumerated type",
5240 TREE_INT_CST_LOW (n->high));
5241 else
5242 warning ("case value `%d' not in enumerated type `%s'",
5243 TREE_INT_CST_LOW (n->high),
5244 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5245 == IDENTIFIER_NODE)
5246 ? TYPE_NAME (type)
5247 : DECL_NAME (TYPE_NAME (type))));
5252 #if 0
5253 /* ??? This optimization is disabled because it causes valid programs to
5254 fail. ANSI C does not guarantee that an expression with enum type
5255 will have a value that is the same as one of the enumeration literals. */
5257 /* If all values were found as case labels, make one of them the default
5258 label. Thus, this switch will never fall through. We arbitrarily pick
5259 the last one to make the default since this is likely the most
5260 efficient choice. */
5262 if (all_values)
5264 for (l = &case_stack->data.case_stmt.case_list;
5265 (*l)->right != 0;
5266 l = &(*l)->right)
5269 case_stack->data.case_stmt.default_label = (*l)->code_label;
5270 *l = 0;
5272 #endif /* 0 */
5276 /* Check that all enumeration literals are covered by the case
5277 expressions of a switch. Also warn if there are any cases
5278 that are not elements of the enumerated type. */
5280 static void
5281 bc_check_for_full_enumeration_handling (type)
5282 tree type;
5284 struct nesting *thiscase = case_stack;
5285 struct case_node *c;
5286 tree e;
5288 /* Check for enums not handled. */
5289 for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
5291 for (c = thiscase->data.case_stmt.case_list->left;
5292 c && tree_int_cst_lt (c->high, TREE_VALUE (e));
5293 c = c->left)
5295 if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
5296 warning ("enumerated value `%s' not handled in switch",
5297 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
5300 /* Check for cases not in the enumeration. */
5301 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5303 for (e = TYPE_VALUES (type);
5304 e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
5305 e = TREE_CHAIN (e))
5307 if (! e)
5308 warning ("case value `%d' not in enumerated type `%s'",
5309 TREE_INT_CST_LOW (c->low),
5310 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
5311 ? TYPE_NAME (type)
5312 : DECL_NAME (TYPE_NAME (type))));
5316 /* Terminate a case (Pascal) or switch (C) statement
5317 in which ORIG_INDEX is the expression to be tested.
5318 Generate the code to test it and jump to the right place. */
5320 void
5321 expand_end_case (orig_index)
5322 tree orig_index;
5324 tree minval, maxval, range, orig_minval;
5325 rtx default_label = 0;
5326 register struct case_node *n;
5327 int count;
5328 rtx index;
5329 rtx table_label;
5330 int ncases;
5331 rtx *labelvec;
5332 register int i;
5333 rtx before_case;
5334 register struct nesting *thiscase = case_stack;
5335 tree index_expr, index_type;
5336 int unsignedp;
5338 if (output_bytecode)
5340 bc_expand_end_case (orig_index);
5341 return;
5344 table_label = gen_label_rtx ();
5345 index_expr = thiscase->data.case_stmt.index_expr;
5346 index_type = TREE_TYPE (index_expr);
5347 unsignedp = TREE_UNSIGNED (index_type);
5349 do_pending_stack_adjust ();
5351 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5352 if (index_type != error_mark_node)
5354 /* If switch expression was an enumerated type, check that all
5355 enumeration literals are covered by the cases.
5356 No sense trying this if there's a default case, however. */
5358 if (!thiscase->data.case_stmt.default_label
5359 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5360 && TREE_CODE (index_expr) != INTEGER_CST)
5361 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5363 /* If this is the first label, warn if any insns have been emitted. */
5364 if (thiscase->data.case_stmt.seenlabel == 0)
5366 rtx insn;
5367 for (insn = get_last_insn ();
5368 insn != case_stack->data.case_stmt.start;
5369 insn = PREV_INSN (insn))
5370 if (GET_CODE (insn) != NOTE
5371 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
5373 warning ("unreachable code at beginning of %s",
5374 case_stack->data.case_stmt.printname);
5375 break;
5379 /* If we don't have a default-label, create one here,
5380 after the body of the switch. */
5381 if (thiscase->data.case_stmt.default_label == 0)
5383 thiscase->data.case_stmt.default_label
5384 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5385 expand_label (thiscase->data.case_stmt.default_label);
5387 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5389 before_case = get_last_insn ();
5391 if (thiscase->data.case_stmt.case_list
5392 && thiscase->data.case_stmt.case_list->left)
5393 thiscase->data.case_stmt.case_list
5394 = case_tree2list(thiscase->data.case_stmt.case_list, 0);
5396 /* Simplify the case-list before we count it. */
5397 group_case_nodes (thiscase->data.case_stmt.case_list);
5399 /* Get upper and lower bounds of case values.
5400 Also convert all the case values to the index expr's data type. */
5402 count = 0;
5403 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5405 /* Check low and high label values are integers. */
5406 if (TREE_CODE (n->low) != INTEGER_CST)
5407 abort ();
5408 if (TREE_CODE (n->high) != INTEGER_CST)
5409 abort ();
5411 n->low = convert (index_type, n->low);
5412 n->high = convert (index_type, n->high);
5414 /* Count the elements and track the largest and smallest
5415 of them (treating them as signed even if they are not). */
5416 if (count++ == 0)
5418 minval = n->low;
5419 maxval = n->high;
5421 else
5423 if (INT_CST_LT (n->low, minval))
5424 minval = n->low;
5425 if (INT_CST_LT (maxval, n->high))
5426 maxval = n->high;
5428 /* A range counts double, since it requires two compares. */
5429 if (! tree_int_cst_equal (n->low, n->high))
5430 count++;
5433 orig_minval = minval;
5435 /* Compute span of values. */
5436 if (count != 0)
5437 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5439 end_cleanup_deferral ();
5441 if (count == 0)
5443 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5444 emit_queue ();
5445 emit_jump (default_label);
5448 /* If range of values is much bigger than number of values,
5449 make a sequence of conditional branches instead of a dispatch.
5450 If the switch-index is a constant, do it this way
5451 because we can optimize it. */
5453 #ifndef CASE_VALUES_THRESHOLD
5454 #ifdef HAVE_casesi
5455 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5456 #else
5457 /* If machine does not have a case insn that compares the
5458 bounds, this means extra overhead for dispatch tables
5459 which raises the threshold for using them. */
5460 #define CASE_VALUES_THRESHOLD 5
5461 #endif /* HAVE_casesi */
5462 #endif /* CASE_VALUES_THRESHOLD */
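/* Illustrative numbers: a switch with case values 1, 2, 3, 4 and 100
   has count == 5 and range == 99; since 99 > 10 * 5, the test below
   chooses compare-and-branch code rather than a jump table.  */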
5464 else if (TREE_INT_CST_HIGH (range) != 0
5465 || count < CASE_VALUES_THRESHOLD
5466 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
5467 > 10 * count)
5468 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5469 || flag_pic
5470 #endif
5471 || TREE_CODE (index_expr) == INTEGER_CST
5472 /* These will reduce to a constant. */
5473 || (TREE_CODE (index_expr) == CALL_EXPR
5474 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5475 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5476 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5477 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5478 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5480 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5482 /* If the index is a short or char for which we do not have
5483 an insn to handle comparisons directly, convert it to
5484 a full integer now, rather than letting each comparison
5485 generate the conversion. */
5487 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5488 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
5489 == CODE_FOR_nothing))
5491 enum machine_mode wider_mode;
5492 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5493 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5494 if (cmp_optab->handlers[(int) wider_mode].insn_code
5495 != CODE_FOR_nothing)
5497 index = convert_to_mode (wider_mode, index, unsignedp);
5498 break;
5502 emit_queue ();
5503 do_pending_stack_adjust ();
5505 index = protect_from_queue (index, 0);
5506 if (GET_CODE (index) == MEM)
5507 index = copy_to_reg (index);
5508 if (GET_CODE (index) == CONST_INT
5509 || TREE_CODE (index_expr) == INTEGER_CST)
5511 /* Make a tree node with the proper constant value
5512 if we don't already have one. */
5513 if (TREE_CODE (index_expr) != INTEGER_CST)
5515 index_expr
5516 = build_int_2 (INTVAL (index),
5517 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5518 index_expr = convert (index_type, index_expr);
5521 /* For constant index expressions we need only
5522 issue an unconditional branch to the appropriate
5523 target code. The job of removing any unreachable
5524 code is left to the optimization phase if the
5525 "-O" option is specified. */
5526 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5527 if (! tree_int_cst_lt (index_expr, n->low)
5528 && ! tree_int_cst_lt (n->high, index_expr))
5529 break;
5531 if (n)
5532 emit_jump (label_rtx (n->code_label));
5533 else
5534 emit_jump (default_label);
5536 else
5538 /* If the index expression is not constant we generate
5539 a binary decision tree to select the appropriate
5540 target code. This is done as follows:
5542 The list of cases is rearranged into a binary tree,
5543 nearly optimal assuming equal probability for each case.
5545 The tree is transformed into RTL, eliminating
5546 redundant test conditions at the same time.
5548 If program flow could reach the end of the
5549 decision tree an unconditional jump to the
5550 default code is emitted. */
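/* Worked example, an added illustration: for the case values
   1, 3, 5, 7 and 9, balance_case_nodes below might produce

		5
	       / \
	      1   7
	       \   \
	        3   9

   (sublists of two stay as chains), so a short path of compares
   decides any index value instead of a linear run of five
   equality tests.  */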
5552 use_cost_table
5553 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5554 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5555 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5556 NULL_PTR);
5557 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5558 default_label, index_type);
5559 emit_jump_if_reachable (default_label);
5562 else
5564 int win = 0;
5565 #ifdef HAVE_casesi
5566 if (HAVE_casesi)
5568 enum machine_mode index_mode = SImode;
5569 int index_bits = GET_MODE_BITSIZE (index_mode);
5570 rtx op1, op2;
5571 enum machine_mode op_mode;
5573 /* Convert the index to SImode. */
5574 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5575 > GET_MODE_BITSIZE (index_mode))
5577 enum machine_mode omode = TYPE_MODE (index_type);
5578 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5580 /* We must handle the endpoints in the original mode. */
5581 index_expr = build (MINUS_EXPR, index_type,
5582 index_expr, minval);
5583 minval = integer_zero_node;
5584 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5585 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
5586 emit_jump_insn (gen_bltu (default_label));
5587 /* Now we can safely truncate. */
5588 index = convert_to_mode (index_mode, index, 0);
5590 else
5592 if (TYPE_MODE (index_type) != index_mode)
5594 index_expr = convert (type_for_size (index_bits, 0),
5595 index_expr);
5596 index_type = TREE_TYPE (index_expr);
5599 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5601 emit_queue ();
5602 index = protect_from_queue (index, 0);
5603 do_pending_stack_adjust ();
5605 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
5606 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
5607 (index, op_mode))
5608 index = copy_to_mode_reg (op_mode, index);
5610 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5612 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
5613 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
5614 (op1, op_mode))
5615 op1 = copy_to_mode_reg (op_mode, op1);
5617 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5619 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
5620 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
5621 (op2, op_mode))
5622 op2 = copy_to_mode_reg (op_mode, op2);
5624 emit_jump_insn (gen_casesi (index, op1, op2,
5625 table_label, default_label));
5626 win = 1;
5628 #endif
5629 #ifdef HAVE_tablejump
5630 if (! win && HAVE_tablejump)
5632 index_expr = convert (thiscase->data.case_stmt.nominal_type,
5633 fold (build (MINUS_EXPR, index_type,
5634 index_expr, minval)));
5635 index_type = TREE_TYPE (index_expr);
5636 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5637 emit_queue ();
5638 index = protect_from_queue (index, 0);
5639 do_pending_stack_adjust ();
5641 do_tablejump (index, TYPE_MODE (index_type),
5642 expand_expr (range, NULL_RTX, VOIDmode, 0),
5643 table_label, default_label);
5644 win = 1;
5646 #endif
5647 if (! win)
5648 abort ();
5650 /* Get table of labels to jump to, in order of case index. */
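/* Illustrative example, not from the original source: for
	case 2: ...  case 4 ... 6: ...
   orig_minval is 2, range is 4 and ncases is 5; the loop below sets
   labelvec[0] to the label for 2 and labelvec[2..4] to the label for
   the range, and the gap at index 1 (value 3) is filled with the
   default label afterwards.  */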
5652 ncases = TREE_INT_CST_LOW (range) + 1;
5653 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5654 bzero ((char *) labelvec, ncases * sizeof (rtx));
5656 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5658 register HOST_WIDE_INT i
5659 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5661 while (1)
5663 labelvec[i]
5664 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
5665 if (i + TREE_INT_CST_LOW (orig_minval)
5666 == TREE_INT_CST_LOW (n->high))
5667 break;
5668 i++;
5672 /* Fill in the gaps with the default. */
5673 for (i = 0; i < ncases; i++)
5674 if (labelvec[i] == 0)
5675 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
5677 /* Output the table. */
5678 emit_label (table_label);
5680 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
5681 were an expression, instead of an #ifdef/#ifndef. */
5682 if (
5683 #ifdef CASE_VECTOR_PC_RELATIVE
5684 1 ||
5685 #endif
5686 flag_pic)
5687 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
5688 gen_rtx (LABEL_REF, Pmode, table_label),
5689 gen_rtvec_v (ncases, labelvec)));
5690 else
5691 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
5692 gen_rtvec_v (ncases, labelvec)));
5694 /* If the case insn drops through the table,
5695 after the table we must jump to the default-label.
5696 Otherwise record no drop-through after the table. */
5697 #ifdef CASE_DROPS_THROUGH
5698 emit_jump (default_label);
5699 #else
5700 emit_barrier ();
5701 #endif
5704 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5705 reorder_insns (before_case, get_last_insn (),
5706 thiscase->data.case_stmt.start);
5708 else
5709 end_cleanup_deferral ();
5711 if (thiscase->exit_label)
5712 emit_label (thiscase->exit_label);
5714 POPSTACK (case_stack);
5716 free_temp_slots ();
5719 /* Convert the tree NODE into a list linked by the right field, with the left
5720 field zeroed. RIGHT is used for recursion; it is a list to be placed
5721 rightmost in the resulting list. */
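/* Illustrative example, an addition: given the tree

	  B
	 / \
	A   C

   case_tree2list (B, 0) yields the chain A -> B -> C linked through
   the `right' fields, with every `left' field zeroed.  */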
5723 static struct case_node *
5724 case_tree2list (node, right)
5725 struct case_node *node, *right;
5727 struct case_node *left;
5729 if (node->right)
5730 right = case_tree2list (node->right, right);
5732 node->right = right;
5733 if ((left = node->left) != 0)
5735 node->left = 0;
5736 return case_tree2list (left, node);
5739 return node;
5742 /* Terminate a case statement. EXPR is the original index
5743 expression. */
5745 static void
5746 bc_expand_end_case (expr)
5747 tree expr;
5749 struct nesting *thiscase = case_stack;
5750 enum bytecode_opcode opcode;
5751 struct bc_label *jump_label;
5752 struct case_node *c;
5754 bc_emit_bytecode (jump);
5755 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
5757 #ifdef DEBUG_PRINT_CODE
5758 fputc ('\n', stderr);
5759 #endif
5761 /* Now that the size of the jump table is known, emit the actual
5762 indexed jump instruction. */
5763 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
5765 opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
5766 ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
5767 : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;
5769 bc_emit_bytecode (opcode);
5771 /* Now emit the case instruction's literal arguments, in order.
5772 In addition to the value on the stack, it uses:
5773 1. The address of the jump table.
5774 2. The size of the jump table.
5775 3. The default label. */
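/* Sketch of the resulting bytecode stream, an added illustration
   (for a caseSI over the cases 1 and 3..5):

	caseSI
	<labelref: jump table>
	<num_ranges: 2>
	<labelref: default, or exit if there is none>
   jump table, pointer-aligned:
	1, 1, <labelref for case 1>
	3, 5, <labelref for case 3..5>

   Each table entry is a low bound, a high bound and a target.  */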
5777 jump_label = bc_get_bytecode_label ();
5778 bc_emit_bytecode_labelref (jump_label);
5779 bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
5780 sizeof thiscase->data.case_stmt.num_ranges);
5782 if (thiscase->data.case_stmt.default_label)
5783 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)));
5784 else
5785 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
5787 /* Output the jump table. */
5789 bc_align_bytecode (3 /* PTR_ALIGN */);
5790 bc_emit_bytecode_labeldef (jump_label);
5792 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
5793 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5795 opcode = TREE_INT_CST_LOW (c->low);
5796 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5798 opcode = TREE_INT_CST_LOW (c->high);
5799 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5801 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
5803 else
5804 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
5805 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5807 bc_emit_bytecode_DI_const (c->low);
5808 bc_emit_bytecode_DI_const (c->high);
5810 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
5812 else
5813 /* Bad mode */
5814 abort ();
5817 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));
5819 /* Possibly issue enumeration warnings. */
5821 if (!thiscase->data.case_stmt.default_label
5822 && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
5823 && TREE_CODE (expr) != INTEGER_CST
5824 && warn_switch)
5825 check_for_full_enumeration_handling (TREE_TYPE (expr));
5828 #ifdef DEBUG_PRINT_CODE
5829 fputc ('\n', stderr);
5830 #endif
5832 POPSTACK (case_stack);
5836 /* Return unique bytecode ID. */
5838 int
5839 bc_new_uid ()
5841 static int bc_uid = 0;
5843 return (++bc_uid);
5846 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5848 static void
5849 do_jump_if_equal (op1, op2, label, unsignedp)
5850 rtx op1, op2, label;
5851 int unsignedp;
5853 if (GET_CODE (op1) == CONST_INT
5854 && GET_CODE (op2) == CONST_INT)
5856 if (INTVAL (op1) == INTVAL (op2))
5857 emit_jump (label);
5859 else
5861 enum machine_mode mode = GET_MODE (op1);
5862 if (mode == VOIDmode)
5863 mode = GET_MODE (op2);
5864 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
5865 emit_jump_insn (gen_beq (label));
5869 /* Not all case values are encountered equally. This function
5870 uses a heuristic to weight case labels, in cases where that
5871 looks like a reasonable thing to do.
5873 Right now, all we try to guess is text, and we establish the
5874 following weights:
5876 chars above space: 16
5877 digits: 16
5878 default: 12
5879 space, punct: 8
5880 tab: 4
5881 newline: 2
5882 other "\" chars: 1
5883 remaining chars: 0
5885 If we find any cases in the switch that are not either -1 or in the range
5886 of valid ASCII characters, or are control characters other than those
5887 commonly used with "\", don't treat this switch as scanning text.
5889 Return 1 if these nodes are suitable for cost estimation, otherwise
5890 return 0. */
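/* Worked example, not part of the original source: for
	switch (c) { case 'a': case '0': case ' ': case '\n': ... }
   every value lies in [-1, 127], so this returns 1 and the weights
   fed to balance_case_nodes are 16, 16, 8 and 2 respectively.  A
   case value of 200, or a control character such as '\001', makes
   it return 0 and the cost table goes unused.  */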
5892 static int
5893 estimate_case_costs (node)
5894 case_node_ptr node;
5896 tree min_ascii = build_int_2 (-1, -1);
5897 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5898 case_node_ptr n;
5899 int i;
5901 /* If we haven't already made the cost table, make it now. Note that the
5902 lower bound of the table is -1, not zero. */
5904 if (cost_table == NULL)
5906 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5907 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5909 for (i = 0; i < 128; i++)
5911 if (isalnum (i))
5912 cost_table[i] = 16;
5913 else if (ispunct (i))
5914 cost_table[i] = 8;
5915 else if (iscntrl (i))
5916 cost_table[i] = -1;
5919 cost_table[' '] = 8;
5920 cost_table['\t'] = 4;
5921 cost_table['\0'] = 4;
5922 cost_table['\n'] = 2;
5923 cost_table['\f'] = 1;
5924 cost_table['\v'] = 1;
5925 cost_table['\b'] = 1;
5928 /* See if all the case expressions look like text. It is text if the
5929 lowest constant is >= -1 and the highest constant is <= 127. Do all comparisons
5930 as signed arithmetic since we don't want to ever access cost_table with a
5931 value less than -1. Also check that none of the constants in a range
5932 are strange control characters. */
5934 for (n = node; n; n = n->right)
5936 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5937 return 0;
5939 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5940 if (cost_table[i] < 0)
5941 return 0;
5944 /* All interesting values are within the range of interesting
5945 ASCII characters. */
5946 return 1;
5949 /* Scan an ordered list of case nodes
5950 combining those with consecutive values or ranges.
5952 E.g., three separate entries 1: 2: 3: become one entry 1..3: */
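/* For instance, as an added note: `case 1: case 2: case 3:' sharing
   one target collapse into 1..3, while 2: and 3: jumping to
   different labels stay separate, and the highest value of a type
   never groups with its successor, since the addition would wrap.  */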
5954 static void
5955 group_case_nodes (head)
5956 case_node_ptr head;
5958 case_node_ptr node = head;
5960 while (node)
5962 rtx lb = next_real_insn (label_rtx (node->code_label));
5963 rtx lb2;
5964 case_node_ptr np = node;
5966 /* Try to group the successors of NODE with NODE. */
5967 while (((np = np->right) != 0)
5968 /* Do they jump to the same place? */
5969 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5970 || (lb != 0 && lb2 != 0
5971 && simplejump_p (lb)
5972 && simplejump_p (lb2)
5973 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5974 SET_SRC (PATTERN (lb2)))))
5975 /* Are their ranges consecutive? */
5976 && tree_int_cst_equal (np->low,
5977 fold (build (PLUS_EXPR,
5978 TREE_TYPE (node->high),
5979 node->high,
5980 integer_one_node)))
5981 /* An overflow is not consecutive. */
5982 && tree_int_cst_lt (node->high,
5983 fold (build (PLUS_EXPR,
5984 TREE_TYPE (node->high),
5985 node->high,
5986 integer_one_node))))
5988 node->high = np->high;
5990 /* NP is the first node after NODE which can't be grouped with it.
5991 Delete the nodes in between, and move on to that node. */
5992 node->right = np;
5993 node = np;
5997 /* Take an ordered list of case nodes
5998 and transform them into a near optimal binary tree,
5999 on the assumption that any target code selection value is as
6000 likely as any other.
6002 The transformation is performed by splitting the ordered
6003 list into two equal sections plus a pivot. The parts are
6004 then attached to the pivot as left and right branches. Each
6005 branch is then transformed recursively. */
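/* Worked example, an added illustration: for the three-entry list
   1, 3..10, 12 the middle entry becomes the pivot, giving

	     3..10
	     /   \
	    1     12

   For longer lists the split point is chosen by cost (or by count,
   with a range weighted as two entries, per the code below).  */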
6007 static void
6008 balance_case_nodes (head, parent)
6009 case_node_ptr *head;
6010 case_node_ptr parent;
6012 register case_node_ptr np;
6014 np = *head;
6015 if (np)
6017 int cost = 0;
6018 int i = 0;
6019 int ranges = 0;
6020 register case_node_ptr *npp;
6021 case_node_ptr left;
6023 /* Count the number of entries on this branch. Also count the ranges. */
6025 while (np)
6027 if (!tree_int_cst_equal (np->low, np->high))
6029 ranges++;
6030 if (use_cost_table)
6031 cost += cost_table[TREE_INT_CST_LOW (np->high)];
6034 if (use_cost_table)
6035 cost += cost_table[TREE_INT_CST_LOW (np->low)];
6037 i++;
6038 np = np->right;
6041 if (i > 2)
6043 /* Split this list if it is long enough for that to help. */
6044 npp = head;
6045 left = *npp;
6046 if (use_cost_table)
6048 /* Find the place in the list that bisects the list's total cost.
6049 Here I gets half the total cost. */
6050 int n_moved = 0;
6051 i = (cost + 1) / 2;
6052 while (1)
6054 /* Skip nodes while their cost does not reach that amount. */
6055 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
6056 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
6057 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
6058 if (i <= 0)
6059 break;
6060 npp = &(*npp)->right;
6061 n_moved += 1;
6063 if (n_moved == 0)
6065 /* Leave this branch lopsided, but optimize left-hand
6066 side and fill in `parent' fields for right-hand side. */
6067 np = *head;
6068 np->parent = parent;
6069 balance_case_nodes (&np->left, np);
6070 for (; np->right; np = np->right)
6071 np->right->parent = np;
6072 return;
6075 /* If there are just three nodes, split at the middle one. */
6076 else if (i == 3)
6077 npp = &(*npp)->right;
6078 else
6080 /* Find the place in the list that bisects the list's total cost,
6081 where ranges count as 2.
6082 Here I gets half the total cost. */
6083 i = (i + ranges + 1) / 2;
6084 while (1)
6086 /* Skip nodes while their cost does not reach that amount. */
6087 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
6088 i--;
6089 i--;
6090 if (i <= 0)
6091 break;
6092 npp = &(*npp)->right;
6095 *head = np = *npp;
6096 *npp = 0;
6097 np->parent = parent;
6098 np->left = left;
6100 /* Optimize each of the two split parts. */
6101 balance_case_nodes (&np->left, np);
6102 balance_case_nodes (&np->right, np);
6104 else
6106 /* Else leave this branch as one level,
6107 but fill in `parent' fields. */
6108 np = *head;
6109 np->parent = parent;
6110 for (; np->right; np = np->right)
6111 np->right->parent = np;
6116 /* Search the parent sections of the case node tree
6117 to see if a test for the lower bound of NODE would be redundant.
6118 INDEX_TYPE is the type of the index expression.
6120 The instructions to generate the case decision tree are
6121 output in the same order as nodes are processed so it is
6122 known that if a parent node tests against the current node's
6123 low bound minus one, the current node is bounded at its lower
6124 end. Thus the test would be redundant. */
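/* Example, an added note: if node->low is 51 and some parent
   already tested against 50 (our low bound minus one), the
   `index < 51' check here would be redundant and is skipped.  */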
6126 static int
6127 node_has_low_bound (node, index_type)
6128 case_node_ptr node;
6129 tree index_type;
6131 tree low_minus_one;
6132 case_node_ptr pnode;
6134 /* If the lower bound of this node is the lowest value in the index type,
6135 we need not test it. */
6137 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
6138 return 1;
6140 /* If this node has a left branch, the value at the left must be less
6141 than that at this node, so it cannot be bounded at the bottom and
6142 we need not bother testing any further. */
6144 if (node->left)
6145 return 0;
6147 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
6148 node->low, integer_one_node));
6150 /* If the subtraction above overflowed, we can't verify anything.
6151 Otherwise, look for a parent that tests our value - 1. */
6153 if (! tree_int_cst_lt (low_minus_one, node->low))
6154 return 0;
6156 for (pnode = node->parent; pnode; pnode = pnode->parent)
6157 if (tree_int_cst_equal (low_minus_one, pnode->high))
6158 return 1;
6160 return 0;
6163 /* Search the parent sections of the case node tree
6164 to see if a test for the upper bound of NODE would be redundant.
6165 INDEX_TYPE is the type of the index expression.
6167 The instructions to generate the case decision tree are
6168 output in the same order as nodes are processed so it is
6169 known that if a parent node tests against the current node's
6170 high bound plus one, the current node is bounded at its upper
6171 end. Thus the test would be redundant. */
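/* Example, an added note, symmetric to the one above: if node->high
   is 49 and some parent already tested against 50 (our high bound
   plus one), the `index > 49' check is redundant.  */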
6173 static int
6174 node_has_high_bound (node, index_type)
6175 case_node_ptr node;
6176 tree index_type;
6178 tree high_plus_one;
6179 case_node_ptr pnode;
6181 /* If the upper bound of this node is the highest value in the type
6182 of the index expression, we need not test against it. */
6184 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
6185 return 1;
6187 /* If this node has a right branch, the value at the right must be greater
6188 than that at this node, so it cannot be bounded at the top and
6189 we need not bother testing any further. */
6191 if (node->right)
6192 return 0;
6194 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
6195 node->high, integer_one_node));
6197 /* If the addition above overflowed, we can't verify anything.
6198 Otherwise, look for a parent that tests our value + 1. */
6200 if (! tree_int_cst_lt (node->high, high_plus_one))
6201 return 0;
6203 for (pnode = node->parent; pnode; pnode = pnode->parent)
6204 if (tree_int_cst_equal (high_plus_one, pnode->low))
6205 return 1;
6207 return 0;
6210 /* Search the parent sections of the
6211 case node tree to see if both tests for the upper and lower
6212 bounds of NODE would be redundant. */
6214 static int
6215 node_is_bounded (node, index_type)
6216 case_node_ptr node;
6217 tree index_type;
6219 return (node_has_low_bound (node, index_type)
6220 && node_has_high_bound (node, index_type));
6223 /* Emit an unconditional jump to LABEL unless it would be dead code. */
6225 static void
6226 emit_jump_if_reachable (label)
6227 rtx label;
6229 if (GET_CODE (get_last_insn ()) != BARRIER)
6230 emit_jump (label);
6233 /* Emit step-by-step code to select a case for the value of INDEX.
6234 The thus generated decision tree follows the form of the
6235 case-node binary tree NODE, whose nodes represent test conditions.
6236 INDEX_TYPE is the type of the index of the switch.
6238 Care is taken to prune redundant tests from the decision tree
6239 by detecting any boundary conditions already checked by
6240 emitted rtx. (See node_has_high_bound, node_has_low_bound
6241 and node_is_bounded, above.)
6243 Where the test conditions can be shown to be redundant we emit
6244 an unconditional jump to the target code. As a further
6245 optimization, the subordinates of a tree node are examined to
6246 check for bounded nodes. In this case conditional and/or
6247 unconditional jumps as a result of the boundary check for the
6248 current node are arranged to target the subordinate's associated
6249 code for out-of-bound conditions on the current node.
6251 We can assume that when control reaches the code generated here,
6252 the index value has already been compared with the parents
6253 of this node, and determined to be on the same side of each parent
6254 as this node is. Thus, if this node tests for the value 51,
6255 and a parent tested for 52, we don't need to consider
6256 the possibility of a value greater than 51. If another parent
6257 tests for the value 50, then this node need not test anything. */
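/* Worked example, an added illustration: for the five-value tree
   rooted at 5 shown earlier (subtrees 1 -> 3 and 7 -> 9), the code
   emitted here is roughly

	if (index == 5) goto L5;
	if (index > 5) goto test_right;
	if (index == 1) goto L1;
	if (index == 3) goto L3;
	goto default;
    test_right:
	if (index == 7) goto L7;
	if (index == 9) goto L9;

   with the final fall-through to default supplied by the caller.  */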
6259 static void
6260 emit_case_nodes (index, node, default_label, index_type)
6261 rtx index;
6262 case_node_ptr node;
6263 rtx default_label;
6264 tree index_type;
6266 /* If INDEX has an unsigned type, we must make unsigned branches. */
6267 int unsignedp = TREE_UNSIGNED (index_type);
6268 typedef rtx rtx_function ();
6269 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
6270 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
6271 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
6272 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
6273 enum machine_mode mode = GET_MODE (index);
6275 /* See if our parents have already tested everything for us.
6276 If they have, emit an unconditional jump for this node. */
6277 if (node_is_bounded (node, index_type))
6278 emit_jump (label_rtx (node->code_label));
6280 else if (tree_int_cst_equal (node->low, node->high))
6282 /* Node is single valued. First see if the index expression matches
6283 this node and then check our children, if any. */
6285 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
6286 label_rtx (node->code_label), unsignedp);
6288 if (node->right != 0 && node->left != 0)
6290 /* This node has children on both sides.
6291 Dispatch to one side or the other
6292 by comparing the index value with this node's value.
6293 If one subtree is bounded, check that one first,
6294 so we can avoid real branches in the tree. */
6296 if (node_is_bounded (node->right, index_type))
6298 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6299 VOIDmode, 0),
6300 GT, NULL_RTX, mode, unsignedp, 0);
6302 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
6303 emit_case_nodes (index, node->left, default_label, index_type);
6306 else if (node_is_bounded (node->left, index_type))
6308 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6309 VOIDmode, 0),
6310 LT, NULL_RTX, mode, unsignedp, 0);
6311 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
6312 emit_case_nodes (index, node->right, default_label, index_type);
6315 else
6317 /* Neither node is bounded. First distinguish the two sides;
6318 then emit the code for one side at a time. */
6320 tree test_label
6321 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6323 /* See if the value is on the right. */
6324 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6325 VOIDmode, 0),
6326 GT, NULL_RTX, mode, unsignedp, 0);
6327 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
6329 /* Value must be on the left.
6330 Handle the left-hand subtree. */
6331 emit_case_nodes (index, node->left, default_label, index_type);
6332 /* If left-hand subtree does nothing,
6333 go to default. */
6334 emit_jump_if_reachable (default_label);
6336 /* Code branches here for the right-hand subtree. */
6337 expand_label (test_label);
6338 emit_case_nodes (index, node->right, default_label, index_type);
6342 else if (node->right != 0 && node->left == 0)
6344 /* Here we have a right child but no left so we issue a conditional
6345 branch to default and process the right child.
6347 Omit the conditional branch to default if it would avoid only one
6348 right child; it costs too much space to save so little time. */
6350 if (node->right->right || node->right->left
6351 || !tree_int_cst_equal (node->right->low, node->right->high))
6353 if (!node_has_low_bound (node, index_type))
6355 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6356 VOIDmode, 0),
6357 LT, NULL_RTX, mode, unsignedp, 0);
6358 emit_jump_insn ((*gen_blt_pat) (default_label));
6361 emit_case_nodes (index, node->right, default_label, index_type);
6363 else
6364 /* We cannot process node->right normally
6365 since we haven't ruled out the numbers less than
6366 this node's value. So handle node->right explicitly. */
6367 do_jump_if_equal (index,
6368 expand_expr (node->right->low, NULL_RTX,
6369 VOIDmode, 0),
6370 label_rtx (node->right->code_label), unsignedp);
6373 else if (node->right == 0 && node->left != 0)
6375 /* Just one subtree, on the left. */
6377 #if 0 /* The following code and comment were formerly part
6378 of the condition here, but they didn't work
6379 and I don't understand what the idea was. -- rms. */
6380 /* If our "most probable entry" is less probable
6381 than the default label, emit a jump to
6382 the default label using condition codes
6383 already lying around. With no right branch,
6384 a branch-greater-than will get us to the default
6385 label correctly. */
6386 if (use_cost_table
6387 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
6389 #endif /* 0 */
6390 if (node->left->left || node->left->right
6391 || !tree_int_cst_equal (node->left->low, node->left->high))
6393 if (!node_has_high_bound (node, index_type))
6395 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6396 VOIDmode, 0),
6397 GT, NULL_RTX, mode, unsignedp, 0);
6398 emit_jump_insn ((*gen_bgt_pat) (default_label));
6401 emit_case_nodes (index, node->left, default_label, index_type);
6403 else
6404 /* We cannot process node->left normally
6405 since we haven't ruled out the numbers greater than
6406 this node's value. So handle node->left explicitly. */
6407 do_jump_if_equal (index,
6408 expand_expr (node->left->low, NULL_RTX,
6409 VOIDmode, 0),
6410 label_rtx (node->left->code_label), unsignedp);
6413 else
6415 /* Node is a range. These cases are very similar to those for a single
6416 value, except that we do not start by testing whether this node
6417 is the one to branch to. */
6419 if (node->right != 0 && node->left != 0)
6421 /* Node has subtrees on both sides.
6422 If the right-hand subtree is bounded,
6423 test for it first, since we can go straight there.
6424 Otherwise, we need to make a branch in the control structure,
6425 then handle the two subtrees. */
6426 tree test_label = 0;
6428 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6429 VOIDmode, 0),
6430 GT, NULL_RTX, mode, unsignedp, 0);
6432 if (node_is_bounded (node->right, index_type))
6433 /* Right hand node is fully bounded so we can eliminate any
6434 testing and branch directly to the target code. */
6435 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
6436 else
6438 /* Right hand node requires testing.
6439 Branch to a label where we will handle it later. */
6441 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6442 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
6445 /* Value belongs to this node or to the left-hand subtree. */
6447 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
6448 GE, NULL_RTX, mode, unsignedp, 0);
6449 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
6451 /* Handle the left-hand subtree. */
6452 emit_case_nodes (index, node->left, default_label, index_type);
6454 /* If right node had to be handled later, do that now. */
6456 if (test_label)
6458 /* If the left-hand subtree fell through,
6459 don't let it fall into the right-hand subtree. */
6460 emit_jump_if_reachable (default_label);
6462 expand_label (test_label);
6463 emit_case_nodes (index, node->right, default_label, index_type);
6467 else if (node->right != 0 && node->left == 0)
6469 /* Deal with values to the left of this node,
6470 if they are possible. */
6471 if (!node_has_low_bound (node, index_type))
6473 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
6474 VOIDmode, 0),
6475 LT, NULL_RTX, mode, unsignedp, 0);
6476 emit_jump_insn ((*gen_blt_pat) (default_label));
6479 /* Value belongs to this node or to the right-hand subtree. */
6481 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6482 VOIDmode, 0),
6483 LE, NULL_RTX, mode, unsignedp, 0);
6484 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
6486 emit_case_nodes (index, node->right, default_label, index_type);
6489 else if (node->right == 0 && node->left != 0)
6491 /* Deal with values to the right of this node,
6492 if they are possible. */
6493 if (!node_has_high_bound (node, index_type))
6495 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6496 VOIDmode, 0),
6497 GT, NULL_RTX, mode, unsignedp, 0);
6498 emit_jump_insn ((*gen_bgt_pat) (default_label));
6501 /* Value belongs to this node or to the left-hand subtree. */
6503 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
6504 GE, NULL_RTX, mode, unsignedp, 0);
6505 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
6507 emit_case_nodes (index, node->left, default_label, index_type);
6510 else
6512 /* Node has no children so we check low and high bounds to remove
6513 redundant tests. Only one of the bounds can exist,
6514 since otherwise this node is bounded--a case tested already. */
6516 if (!node_has_high_bound (node, index_type))
6518 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6519 VOIDmode, 0),
6520 GT, NULL_RTX, mode, unsignedp, 0);
6521 emit_jump_insn ((*gen_bgt_pat) (default_label));
6524 if (!node_has_low_bound (node, index_type))
6526 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
6527 VOIDmode, 0),
6528 LT, NULL_RTX, mode, unsignedp, 0);
6529 emit_jump_insn ((*gen_blt_pat) (default_label));
6532 emit_jump (label_rtx (node->code_label));
6537 /* These routines are used by the loop unrolling code. They copy BLOCK trees
6538 so that the debugging info will be correct for the unrolled loop. */
6540 /* Indexed by block number, contains a pointer to the N'th block node. */
6542 static tree *block_vector;
6544 void
6545 find_loop_tree_blocks ()
6547 tree block = DECL_INITIAL (current_function_decl);
6549 block_vector = identify_blocks (block, get_insns ());
6552 void
6553 unroll_block_trees ()
6555 tree block = DECL_INITIAL (current_function_decl);
6557 reorder_blocks (block_vector, block, get_insns ());
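/* Usage sketch, an added illustration (the actual caller lives in
   the loop-unrolling pass, not in this file): the unroller is
   expected to bracket its work with these two calls, roughly

	find_loop_tree_blocks ();
	... copy loop body insns, duplicating block notes ...
	unroll_block_trees ();

   so that reorder_blocks can rebuild the BLOCK tree to match the
   duplicated NOTE_INSN_BLOCK_BEG/END notes.  */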