/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-6, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
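
/* For example, a C front end might expand `if (cond) stmt;' roughly as
   follows (an illustrative sketch of the calling convention described
   above, not a transcript of any particular parser):

	expand_start_cond (cond, 0);
	...expand STMT...
	expand_end_cond ();  */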
#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"

#include "bytecode.h"
#include "bc-typecd.h"
#include "bc-opcode.h"
#include "bc-optab.h"
#include "bc-emit.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;
/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;
/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

/* Stack allocation level in which temporaries for TARGET_EXPRs live.  */
extern int target_temp_slot_level;

extern int temp_slot_level;
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
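
/* For example (illustrative only): a Pascal-style range label such as
   `1..5:' produces a single case_node with LOW = 1 and HIGH = 5, while
   the C label `case 3:' produces a node with LOW = HIGH = 3.  */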
struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  Complemented by
	     bc_stack_level (see below) when generating bytecodes.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	  /* Bytecode specific: stack level to restore stack to on exit.  */
	  int bc_stack_level;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block; if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups; must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	  /* When in a conditional context, this is the specific
	     cleanup list associated with last_unconditional_cleanup,
	     where we place the conditionalized cleanups.  */
	  tree *cleanup_ptr;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* For bytecodes, the case table is in-lined right in the code.
	     A label is needed for skipping over this block.  It is only
	     used when generating bytecodes.  */
	  rtx skip_label;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
    } data;
};
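
/* Each entry is reached through the chain for its kind; for instance
   (illustrative only), `case_stack->data.case_stmt.default_label' is
   the default label of the innermost pending switch statement.  */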
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACK too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
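
/* For example (illustrative only), `POPSTACK (loop_stack);' pops
   nesting_stack down to and including the entry at the head of
   loop_stack, keeping every per-construct chain in step.  */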
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
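
/* A sketch of the kind of code that needs a fixup (illustrative only,
   using a GNU C variable-sized array so the block resets the stack):

	{
	  int buf[n];
	  goto out;	...target not yet defined: fixup recorded...
	}
      out: ;

   When the contour holding BUF is exited, the fixup is checked, and
   once `out' is defined the stack-restoring code is inserted just
   before the jump.  */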
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;

  /* Bytecode specific members follow */

  /* The label that this jump is jumping to, or 0 for break, continue
     or return.  */
  struct bc_label *bc_target;

  /* The label we use for the fixup patch */
  struct bc_label *label;

  /* True (non-0) if fixup has been handled */
  int bc_handled:1;

  /* Like stack_level above, except refers to the interpreter stack */
  int bc_stack_level;
};

static struct goto_fixup *goto_fixup_chain;
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following label_chain entry.  */
  struct label_chain *next;
  tree label;
};
/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;
static void expand_goto_internal	PROTO((tree, rtx, rtx));
static void bc_expand_goto_internal	PROTO((enum bytecode_opcode,
					       struct bc_label *, tree));
static int expand_fixup			PROTO((tree, rtx, rtx));
static void bc_expand_fixup		PROTO((enum bytecode_opcode,
					       struct bc_label *, int));
static void fixup_gotos			PROTO((struct nesting *, rtx, tree,
					       rtx, int));
static void bc_fixup_gotos		PROTO((struct nesting *, int, tree,
					       rtx, int));
static void bc_expand_start_cond	PROTO((tree, int));
static void bc_expand_end_cond		PROTO((void));
static void bc_expand_start_else	PROTO((void));
static void bc_expand_end_loop		PROTO((void));
static void bc_expand_end_bindings	PROTO((tree, int, int));
static void bc_expand_decl		PROTO((tree, tree));
static void bc_expand_variable_local_init PROTO((tree));
static void bc_expand_decl_init		PROTO((tree));
static void expand_null_return_1	PROTO((rtx, int));
static void expand_value_return		PROTO((rtx));
static int tail_recursion_args		PROTO((tree, tree));
static void expand_cleanups		PROTO((tree, tree, int, int));
static void bc_expand_start_case	PROTO((struct nesting *, tree,
					       tree, char *));
static int bc_pushcase			PROTO((tree, tree));
static void bc_check_for_full_enumeration_handling PROTO((tree));
static void bc_expand_end_case		PROTO((tree));
static void do_jump_if_equal		PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs		PROTO((case_node_ptr));
static void group_case_nodes		PROTO((case_node_ptr));
static void balance_case_nodes		PROTO((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PROTO((case_node_ptr, tree));
static int node_has_high_bound		PROTO((case_node_ptr, tree));
static int node_is_bounded		PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable	PROTO((rtx));
static void emit_case_nodes		PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node		PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list PROTO((case_node *, case_node *));

extern rtx bc_allocate_local ();
extern rtx bc_allocate_variable_array ();
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
  init_eh ();
}
void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;

  init_eh_for_function ();
}
void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
  save_eh_status (p);
}
void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
  restore_eh_status (p);
}
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  if (!output_bytecode)
    {
      last_insn = get_last_insn ();
      if (!optimize
	  && (GET_CODE (last_insn) == CODE_LABEL
	      || (GET_CODE (last_insn) == NOTE
		  && prev_real_insn (last_insn) == 0)))
	emit_insn (gen_nop ());
    }
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  if (output_bytecode)
    {
      bc_expand_expr (exp);
      bc_emit_instruction (jumpP);
    }
  else
    {
      rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
      x = convert_memory_address (Pmode, x);
#endif

      emit_queue ();
      /* Be sure the function is executable.  */
      if (flag_check_memory_usage)
	emit_library_call (chkr_check_exec_libfunc, 1,
			   VOIDmode, 1, x, ptr_mode);

      do_pending_stack_adjust ();
      emit_indirect_jump (x);
    }
}
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  if (output_bytecode)
    {
      if (! DECL_RTL (label))
	DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
      if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
	error ("multiply defined label");
      return;
    }

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  if (output_bytecode)
    {
      expand_goto_internal (label, label_rtx (label), NULL_RTX);
      return;
    }

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with hard_frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx, label_ref);
	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be the CODE_LABEL rtx for the target.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  /* NOTICE!  If a bytecode instruction other than `jump' is needed,
     then the caller has to call bc_expand_goto_internal()
     directly.  This is rather an exceptional case, and there aren't
     that many places where this is necessary.  */
  if (output_bytecode)
    {
      bc_expand_goto_internal (jump, BYTECODE_BC_LABEL (label), body);
      return;
    }

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate a jump with OPCODE to the given bytecode LABEL which is
   found within BODY.  */

static void
bc_expand_goto_internal (opcode, label, body)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     tree body;
{
  struct nesting *block;
  int stack_level = -1;

  /* If the label is defined, adjust the stack as necessary.
     If it's not defined, we have to push the reference on the
     fixup list.  */

  if (label->defined)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing bytecode uids.)  Then restore the
	 outermost stack level within that block.  */

      for (block = block_stack; block; block = block->next)
	{
	  if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)
	    break;
	  if (block->data.block.bc_stack_level)
	    stack_level = block->data.block.bc_stack_level;

	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      /* Restore the stack level.  If we need to adjust the stack, we
	 must do so after the jump, since the jump may depend on
	 what's on the stack.  Thus, any stack-modifying conditional
	 jumps (these are the only ones that rely on what's on the
	 stack) go into the fixup list.  */

      if (stack_level >= 0
	  && stack_depth != stack_level
	  && opcode != jump)
	bc_expand_fixup (opcode, label, stack_level);
      else
	{
	  if (stack_level >= 0)
	    bc_adjust_stack (stack_depth - stack_level);

	  if (body && DECL_BIT_FIELD (body))
	    error ("jump to `%s' invalidly jumps into binding contour",
		   IDENTIFIER_POINTER (DECL_NAME (body)));

	  /* Emit immediate jump */
	  bc_emit_bytecode (opcode);
	  bc_emit_bytecode_labelref (label);

#ifdef DEBUG_PRINT_CODE
	  fputc ('\n', stderr);
#endif
	}
    }
  else
    /* Put goto in the fixup list */
    bc_expand_fixup (opcode, label, stack_level);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	start_sequence ();
	pushlevel (0);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
	end_sequence ();
	emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
   Make the fixup restore the stack level to STACK_LEVEL.  */

static void
bc_expand_fixup (opcode, label, stack_level)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     int stack_level;
{
  struct goto_fixup *fixup
    = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));

  fixup->label = bc_get_bytecode_label ();
  fixup->bc_target = label;
  fixup->bc_stack_level = stack_level;
  fixup->bc_handled = FALSE;

  fixup->next = goto_fixup_chain;
  goto_fixup_chain = fixup;

  /* Insert a jump to the fixup code */
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (fixup->label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  if (output_bytecode)
    {
      /* ??? The second arg is the bc stack level, which is not the same
	 as STACK_LEVEL.  I have no idea what should go here, so I'll
	 just pass 0.  */
      bc_fixup_gotos (thisblock, 0, cleanup_list, first_insn, dont_jump_in);
      return;
    }

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   Note: STACK_DEPTH is not altered.

   The arguments are currently not used in the bytecode compiler, but we may
   need them one day for languages other than C.

   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     int stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;
  int saved_stack_depth;

  /* F is the fixup we are considering; PREV is the previous one.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev)
	    prev->next = f->next;
	}

      /* Emit code to restore the stack and continue */
      bc_emit_bytecode_labeldef (f->label);

      /* Save stack_depth across call, since bc_adjust_stack will alter
	 the perceived stack depth via the instructions generated.  */

      if (f->bc_stack_level >= 0)
	{
	  saved_stack_depth = stack_depth;
	  bc_adjust_stack (stack_depth - f->bc_stack_level);
	  stack_depth = saved_stack_depth;
	}

      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (f->bc_target);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
    }

  goto_fixup_chain = NULL;
}
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (output_bytecode)
    {
      error ("`asm' is invalid when generating bytecode");
      return;
    }

  if (flag_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
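
/* For reference, a GNU C statement such as

	asm volatile ("add %2,%0" : "=r" (sum) : "0" (sum), "g" (inc) : "cc");

   arrives here with STRING holding the template, OUTPUTS and INPUTS
   holding (constraint, expression) pairs, CLOBBERS naming "cc", and
   VOL nonzero.  (Illustrative example only; the operand names and the
   template are made up.)  */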

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;

  /* An ASM with no outputs needs to be treated as volatile.  */
  if (noutputs == 0)
    vol = 1;

  if (output_bytecode)
    {
      error ("`asm' is invalid when generating bytecode");
      return;
    }

  if (flag_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      tree val1;
      int j;
      int found_equal = 0;
      int found_plus = 0;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
	 if it allows any register.  Be liberal on the latter test, since
	 the worst that happens if we get it wrong is we issue an error
	 message.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
	switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
	  {
	  case '+':
	    /* Make sure we can specify the matching operand.  */
	    if (i > 9)
	      {
		error ("output operand constraint %d contains `+'", i);
		return;
	      }

	    /* Replace '+' with '='.  */
	    TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] = '=';
	    found_plus = 1;
	    break;

	  case '=':
	    found_equal = 1;
	    break;

	  case '?':  case '!':  case '*':  case '%':  case '&':
	  case 'V':  case 'm':  case 'o':  case '<':  case '>':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    error ("matching constraint not valid in output operand");
	    break;

	  case 'p':  case 'g':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      if (! found_equal && ! found_plus)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      if (TREE_CODE (val) == INDIRECT_REF
	  || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || found_plus)
	{
	  if (! allows_reg)
	    mark_addressable (TREE_VALUE (tail));

	  output_rtx[i]
	    = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
			   EXPAND_MEMORY_USE_WO);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);
	}
      else
	{
	  output_rtx[i] = assign_temp (type, 0, 0, 0);
	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
	}

      if (found_plus)
	{
	  inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
	  inout_opnum[ninout++] = i;
	}
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)) );
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
	switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
	  {
	  case '+':  case '=':
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;

	  case '?':  case '!':  case '*':  case '%':  case '&':
	  case 'V':  case 'm':  case 'o':  case '<':  case '>':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	    /* Whether or not a numeric constraint allows a register is
	       decided by the matching constraint, and so there is no need
	       to do anything special with them.  We must handle them in
	       the default case, so that we don't unnecessarily force
	       operands to memory.  */
	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]
		>= '0' + noutputs)
	      {
		error
		  ("matching constraint references invalid operand number");
		return;
	      }

	    /* ... fall through ... */

	  case 'p':  case 'g':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      if (! allows_reg)
	mark_addressable (TREE_VALUE (tail));

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      if (CONSTANT_P (XVECEXP (body, 3, i))
	  && ! general_operand (XVECEXP (body, 3, i),
				TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)))))
	{
	  if (allows_reg)
	    XVECEXP (body, 3, i)
	      = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			   XVECEXP (body, 3, i));
	  else
	    XVECEXP (body, 3, i)
	      = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
				 XVECEXP (body, 3, i));
	}

      if (! allows_reg
	  && (GET_CODE (XVECEXP (body, 3, i)) == REG
	      || GET_CODE (XVECEXP (body, 3, i)) == SUBREG
	      || GET_CODE (XVECEXP (body, 3, i)) == CONCAT))
	{
	  tree type = TREE_TYPE (TREE_VALUE (tail));
	  rtx memloc = assign_temp (type, 1, 1, 1);

	  emit_move_insn (memloc, XVECEXP (body, 3, i));
	  XVECEXP (body, 3, i) = memloc;
	}

      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs - ninout; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      static char match[9+1][2]
	= {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
      int j = inout_opnum[i];

      XVECEXP (body, 3, ninputs - ninout + i)      /* argvec */
	= output_rtx[j];
      XVECEXP (body, 4, ninputs - ninout + i)      /* constraints */
	= gen_rtx (ASM_INPUT, inout_mode[j], match[j]);
    }
1703 /* Now, for each output, construct an rtx
1704 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1705 ARGVEC CONSTRAINTS))
1706 If there is more than one, put them inside a PARALLEL. */
1708 if (noutputs == 1 && nclobbers == 0)
1710 XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1711 insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
1713 else if (noutputs == 0 && nclobbers == 0)
1715 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1716 insn = emit_insn (body);
1718 else
1720 rtx obody = body;
1721 int num = noutputs;
1722 if (num == 0) num = 1;
1723 body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));
1725 /* For each output operand, store a SET. */
1727 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1729 XVECEXP (body, 0, i)
1730 = gen_rtx (SET, VOIDmode,
1731 output_rtx[i],
1732 gen_rtx (ASM_OPERANDS, VOIDmode,
1733 TREE_STRING_POINTER (string),
1734 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1735 i, argvec, constraints,
1736 filename, line));
1737 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1740 /* If there are no outputs (but there are some clobbers)
1741 store the bare ASM_OPERANDS into the PARALLEL. */
1743 if (i == 0)
1744 XVECEXP (body, 0, i++) = obody;
1746 /* Store (clobber REG) for each clobbered register specified. */
1748 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1750 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1751 int j = decode_reg_name (regname);
1753 if (j < 0)
1755 if (j == -3) /* `cc', which is not a register */
1756 continue;
1758 if (j == -4) /* `memory', don't cache memory across asm */
1760 XVECEXP (body, 0, i++)
1761 = gen_rtx (CLOBBER, VOIDmode,
1762 gen_rtx (MEM, BLKmode,
1763 gen_rtx (SCRATCH, VOIDmode, 0)));
1764 continue;
1767 /* Ignore unknown register, error already signaled. */
1768 continue;
1771 /* Use QImode since that's guaranteed to clobber just one reg. */
1772 XVECEXP (body, 0, i++)
1773 = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
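/* E.g. a hypothetical

     asm volatile ("" : : : "cc", "memory");

   adds no CLOBBER for "cc" (decoded as -3 above, since it is not a
   real register here) but does add (clobber (mem:BLK (scratch))) for
   "memory", which keeps memory values from being cached across the
   asm.  */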
1776 insn = emit_insn (body);
1779 free_temp_slots ();
1782 /* Generate RTL to evaluate the expression EXP
1783 and remember it in case this is the VALUE in a ({... VALUE; }) constr. */
1785 void
1786 expand_expr_stmt (exp)
1787 tree exp;
1789 if (output_bytecode)
1791 int org_stack_depth = stack_depth;
1793 bc_expand_expr (exp);
1795 /* Restore stack depth */
1796 if (stack_depth < org_stack_depth)
1797 abort ();
1799 bc_emit_instruction (drop);
1801 last_expr_type = TREE_TYPE (exp);
1802 return;
1805 /* If -W, warn about statements with no side effects,
1806 except for an explicit cast to void (e.g. for assert()), and
1807 except inside a ({...}) where they may be useful. */
1808 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1810 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1811 && !(TREE_CODE (exp) == CONVERT_EXPR
1812 && TREE_TYPE (exp) == void_type_node))
1813 warning_with_file_and_line (emit_filename, emit_lineno,
1814 "statement with no effect");
1815 else if (warn_unused)
1816 warn_if_unused_value (exp);
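/* For example (hypothetical statements): with -W, `x + 1;' draws
   "statement with no effect", while `(void) (x + 1);' is exempt
   because of the explicit cast to void, and the last expr-stmt of a
   ({...}) grouping is exempt because expr_stmts_for_value is then
   nonzero.  */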
1819 /* If EXP is of function type and we are expanding statements for
1820 value, convert it to pointer-to-function. */
1821 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1822 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1824 last_expr_type = TREE_TYPE (exp);
1825 if (! flag_syntax_only)
1826 last_expr_value = expand_expr (exp,
1827 (expr_stmts_for_value
1828 ? NULL_RTX : const0_rtx),
1829 VOIDmode, 0);
1831 /* If all we do is reference a volatile value in memory,
1832 copy it to a register to be sure it is actually touched. */
1833 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1834 && TREE_THIS_VOLATILE (exp))
1836 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1838 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1839 copy_to_reg (last_expr_value);
1840 else
1842 rtx lab = gen_label_rtx ();
1844 /* Compare the value with itself to reference it. */
1845 emit_cmp_insn (last_expr_value, last_expr_value, EQ,
1846 expand_expr (TYPE_SIZE (last_expr_type),
1847 NULL_RTX, VOIDmode, 0),
1848 BLKmode, 0,
1849 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
1850 emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
1851 emit_label (lab);
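/* So a hypothetical `volatile int v; ... v;' gets its MEM copied to a
   register, guaranteeing one real read; a volatile BLKmode value
   cannot be copied to a register, so the self-comparison above is
   emitted purely to force the reference.  */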
1855 /* If this expression is part of a ({...}) and is in memory, we may have
1856 to preserve temporaries. */
1857 preserve_temp_slots (last_expr_value);
1859 /* Free any temporaries used to evaluate this expression. Any temporary
1860 used as a result of this expression will already have been preserved
1861 above. */
1862 free_temp_slots ();
1864 emit_queue ();
1867 /* Warn if EXP contains any computations whose results are not used.
1868 Return 1 if a warning is printed; 0 otherwise. */
1870 int
1871 warn_if_unused_value (exp)
1872 tree exp;
1874 if (TREE_USED (exp))
1875 return 0;
1877 switch (TREE_CODE (exp))
1879 case PREINCREMENT_EXPR:
1880 case POSTINCREMENT_EXPR:
1881 case PREDECREMENT_EXPR:
1882 case POSTDECREMENT_EXPR:
1883 case MODIFY_EXPR:
1884 case INIT_EXPR:
1885 case TARGET_EXPR:
1886 case CALL_EXPR:
1887 case METHOD_CALL_EXPR:
1888 case RTL_EXPR:
1889 case TRY_CATCH_EXPR:
1890 case WITH_CLEANUP_EXPR:
1891 case EXIT_EXPR:
1892 /* We don't warn about COND_EXPR because it may be a useful
1893 construct if either arm contains a side effect. */
1894 case COND_EXPR:
1895 return 0;
1897 case BIND_EXPR:
1898 /* For a binding, warn if no side effect within it. */
1899 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1901 case SAVE_EXPR:
1902 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1904 case TRUTH_ORIF_EXPR:
1905 case TRUTH_ANDIF_EXPR:
1906 /* In && or ||, warn if 2nd operand has no side effect. */
1907 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1909 case COMPOUND_EXPR:
1910 if (TREE_NO_UNUSED_WARNING (exp))
1911 return 0;
1912 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1913 return 1;
1914 /* Let people do `(foo (), 0)' without a warning. */
1915 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1916 return 0;
1917 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1919 case NOP_EXPR:
1920 case CONVERT_EXPR:
1921 case NON_LVALUE_EXPR:
1922 /* Don't warn about values cast to void. */
1923 if (TREE_TYPE (exp) == void_type_node)
1924 return 0;
1925 /* Don't warn about conversions not explicit in the user's program. */
1926 if (TREE_NO_UNUSED_WARNING (exp))
1927 return 0;
1928 /* Assignment to a cast usually results in a cast of a modify.
1929 Don't complain about that. There can be an arbitrary number of
1930 casts before the modify, so we must loop until we find the first
1931 non-cast expression and then test to see if that is a modify. */
1933 tree tem = TREE_OPERAND (exp, 0);
1935 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1936 tem = TREE_OPERAND (tem, 0);
1938 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1939 || TREE_CODE (tem) == CALL_EXPR)
1940 return 0;
1942 goto warn;
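/* E.g. a hypothetical statement `(float) (x = y);' strips down to the
   MODIFY_EXPR and draws no warning; only a cast whose operand is
   neither an assignment, an initialization, nor a call reaches the
   warning below.  */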
1944 case INDIRECT_REF:
1945 /* Don't warn about automatic dereferencing of references, since
1946 the user cannot control it. */
1947 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1948 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1949 /* ... fall through ... */
1951 default:
1952 /* Referencing a volatile value is a side effect, so don't warn. */
1953 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1954 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1955 && TREE_THIS_VOLATILE (exp))
1956 return 0;
1957 warn:
1958 warning_with_file_and_line (emit_filename, emit_lineno,
1959 "value computed is not used");
1960 return 1;
1964 /* Clear out the memory of the last expression evaluated. */
1966 void
1967 clear_last_expr ()
1969 last_expr_type = 0;
1972 /* Begin a statement which will return a value.
1973 Return the RTL_EXPR for this statement expr.
1974 The caller must save that value and pass it to expand_end_stmt_expr. */
1976 tree
1977 expand_start_stmt_expr ()
1979 int momentary;
1980 tree t;
1982 /* When generating bytecode just note down the stack depth */
1983 if (output_bytecode)
1984 return (build_int_2 (stack_depth, 0));
1986 /* Make the RTL_EXPR node temporary, not momentary,
1987 so that rtl_expr_chain doesn't become garbage. */
1988 momentary = suspend_momentary ();
1989 t = make_node (RTL_EXPR);
1990 resume_momentary (momentary);
1991 do_pending_stack_adjust ();
1992 start_sequence_for_rtl_expr (t);
1993 NO_DEFER_POP;
1994 expr_stmts_for_value++;
1995 return t;
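/* Sketch of the protocol, for a hypothetical

     int x = ({ int t = f (); t + 1; });

   The parser calls expand_start_stmt_expr, expands each inner
   statement (with expr_stmts_for_value nonzero, so `t + 1' keeps its
   value), and hands the tree returned here to expand_end_stmt_expr.  */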
1998 /* Restore the previous state at the end of a statement that returns a value.
1999 Returns a tree node representing the statement's value and the
2000 insns to compute the value.
2002 The nodes of that expression have been freed by now, so we cannot use them.
2003 But we don't want to do that anyway; the expression has already been
2004 evaluated and now we just want to use the value. So generate an RTL_EXPR
2005 with the proper type and RTL value.
2007 If the last substatement was not an expression,
2008 return something with type `void'. */
2010 tree
2011 expand_end_stmt_expr (t)
2012 tree t;
2014 if (output_bytecode)
2016 int i;
2017 tree t;
2020 /* At this point, all expressions have been evaluated in order.
2021 However, all expression values have been popped when evaluated,
2022 which means we have to recover the last expression value. This is
2023 the last value removed by means of a `drop' instruction. Instead
2024 of adding code to inhibit dropping the last expression value, it
2025 is here recovered by undoing the `drop'. Since `drop' is
2026 equivalent to `adjstackSI [1]', it can be undone with `adjstackSI
2027 [-1]'. */
2029 bc_adjust_stack (-1);
2031 if (!last_expr_type)
2032 last_expr_type = void_type_node;
2034 t = make_node (RTL_EXPR);
2035 TREE_TYPE (t) = last_expr_type;
2036 RTL_EXPR_RTL (t) = NULL;
2037 RTL_EXPR_SEQUENCE (t) = NULL;
2039 /* Don't consider deleting this expr or containing exprs at tree level. */
2040 TREE_THIS_VOLATILE (t) = 1;
2042 last_expr_type = 0;
2043 return t;
2046 OK_DEFER_POP;
2048 if (last_expr_type == 0)
2050 last_expr_type = void_type_node;
2051 last_expr_value = const0_rtx;
2053 else if (last_expr_value == 0)
2054 /* There are some cases where this can happen, such as when the
2055 statement is void type. */
2056 last_expr_value = const0_rtx;
2057 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2058 /* Remove any possible QUEUED. */
2059 last_expr_value = protect_from_queue (last_expr_value, 0);
2061 emit_queue ();
2063 TREE_TYPE (t) = last_expr_type;
2064 RTL_EXPR_RTL (t) = last_expr_value;
2065 RTL_EXPR_SEQUENCE (t) = get_insns ();
2067 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2069 end_sequence ();
2071 /* Don't consider deleting this expr or containing exprs at tree level. */
2072 TREE_SIDE_EFFECTS (t) = 1;
2073 /* Propagate volatility of the actual RTL expr. */
2074 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2076 last_expr_type = 0;
2077 expr_stmts_for_value--;
2079 return t;
2082 /* Generate RTL for the start of an if-then. COND is the expression
2083 whose truth should be tested.
2085 If EXITFLAG is nonzero, this conditional is visible to
2086 `exit_something'. */
2088 void
2089 expand_start_cond (cond, exitflag)
2090 tree cond;
2091 int exitflag;
2093 struct nesting *thiscond = ALLOC_NESTING ();
2095 /* Make an entry on cond_stack for the cond we are entering. */
2097 thiscond->next = cond_stack;
2098 thiscond->all = nesting_stack;
2099 thiscond->depth = ++nesting_depth;
2100 thiscond->data.cond.next_label = gen_label_rtx ();
2101 /* Before we encounter an `else', we don't need a separate exit label
2102 unless there are supposed to be exit statements
2103 to exit this conditional. */
2104 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2105 thiscond->data.cond.endif_label = thiscond->exit_label;
2106 cond_stack = thiscond;
2107 nesting_stack = thiscond;
2109 if (output_bytecode)
2110 bc_expand_start_cond (cond, exitflag);
2111 else
2112 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2115 /* Generate RTL between then-clause and the elseif-clause
2116 of an if-then-elseif-.... */
2118 void
2119 expand_start_elseif (cond)
2120 tree cond;
2122 if (cond_stack->data.cond.endif_label == 0)
2123 cond_stack->data.cond.endif_label = gen_label_rtx ();
2124 emit_jump (cond_stack->data.cond.endif_label);
2125 emit_label (cond_stack->data.cond.next_label);
2126 cond_stack->data.cond.next_label = gen_label_rtx ();
2127 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2130 /* Generate RTL between the then-clause and the else-clause
2131 of an if-then-else. */
2133 void
2134 expand_start_else ()
2136 if (cond_stack->data.cond.endif_label == 0)
2137 cond_stack->data.cond.endif_label = gen_label_rtx ();
2139 if (output_bytecode)
2141 bc_expand_start_else ();
2142 return;
2145 emit_jump (cond_stack->data.cond.endif_label);
2146 emit_label (cond_stack->data.cond.next_label);
2147 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2150 /* After calling expand_start_else, turn this "else" into an "else if"
2151 by providing another condition. */
2153 void
2154 expand_elseif (cond)
2155 tree cond;
2157 cond_stack->data.cond.next_label = gen_label_rtx ();
2158 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2161 /* Generate RTL for the end of an if-then.
2162 Pop the record for it off of cond_stack. */
2164 void
2165 expand_end_cond ()
2167 struct nesting *thiscond = cond_stack;
2169 if (output_bytecode)
2170 bc_expand_end_cond ();
2171 else
2173 do_pending_stack_adjust ();
2174 if (thiscond->data.cond.next_label)
2175 emit_label (thiscond->data.cond.next_label);
2176 if (thiscond->data.cond.endif_label)
2177 emit_label (thiscond->data.cond.endif_label);
2180 POPSTACK (cond_stack);
2181 last_expr_type = 0;
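/* One possible call sequence, for a hypothetical
   `if (a) s1; else if (b) s2; else s3;' (a front end may equally use
   expand_start_else followed by expand_elseif):

     expand_start_cond (a, 0);    ... expand s1 ...
     expand_start_elseif (b);     ... expand s2 ...
     expand_start_else ();        ... expand s3 ...
     expand_end_cond ();
   */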
2185 /* Generate code for the start of an if-then. COND is the expression
2186 whose truth is to be tested; if EXITFLAG is nonzero this conditional
2187 is to be visible to exit_something. It is assumed that the caller
2188 has pushed the previous context on the cond stack. */
2190 static void
2191 bc_expand_start_cond (cond, exitflag)
2192 tree cond;
2193 int exitflag;
2195 struct nesting *thiscond = cond_stack;
2197 thiscond->data.case_stmt.nominal_type = cond;
2198 if (! exitflag)
2199 thiscond->exit_label = gen_label_rtx ();
2200 bc_expand_expr (cond);
2201 bc_emit_bytecode (xjumpifnot);
2202 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2204 #ifdef DEBUG_PRINT_CODE
2205 fputc ('\n', stderr);
2206 #endif
2209 /* Generate the label for the end of an if with
2210 no else-clause. */
2212 static void
2213 bc_expand_end_cond ()
2215 struct nesting *thiscond = cond_stack;
2217 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
2220 /* Generate code for the start of the else-clause of
2221 an if-then-else. */
2223 static void
2224 bc_expand_start_else ()
2226 struct nesting *thiscond = cond_stack;
2228 thiscond->data.cond.endif_label = thiscond->exit_label;
2229 thiscond->exit_label = gen_label_rtx ();
2230 bc_emit_bytecode (jump);
2231 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2233 #ifdef DEBUG_PRINT_CODE
2234 fputc ('\n', stderr);
2235 #endif
2237 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
2240 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2241 loop should be exited by `exit_something'. This is a loop for which
2242 `expand_continue_loop' will jump to the top of the loop.
2244 Make an entry on loop_stack to record the labels associated with
2245 this loop. */
2247 struct nesting *
2248 expand_start_loop (exit_flag)
2249 int exit_flag;
2251 register struct nesting *thisloop = ALLOC_NESTING ();
2253 /* Make an entry on loop_stack for the loop we are entering. */
2255 thisloop->next = loop_stack;
2256 thisloop->all = nesting_stack;
2257 thisloop->depth = ++nesting_depth;
2258 thisloop->data.loop.start_label = gen_label_rtx ();
2259 thisloop->data.loop.end_label = gen_label_rtx ();
2260 thisloop->data.loop.alt_end_label = 0;
2261 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2262 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2263 loop_stack = thisloop;
2264 nesting_stack = thisloop;
2266 if (output_bytecode)
2268 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2269 return thisloop;
2272 do_pending_stack_adjust ();
2273 emit_queue ();
2274 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2275 emit_label (thisloop->data.loop.start_label);
2277 return thisloop;
2280 /* Like expand_start_loop but for a loop where the continuation point
2281 (for expand_continue_loop) will be specified explicitly. */
2283 struct nesting *
2284 expand_start_loop_continue_elsewhere (exit_flag)
2285 int exit_flag;
2287 struct nesting *thisloop = expand_start_loop (exit_flag);
2288 loop_stack->data.loop.continue_label = gen_label_rtx ();
2289 return thisloop;
2292 /* Specify the continuation point for a loop started with
2293 expand_start_loop_continue_elsewhere.
2294 Use this at the point in the code to which a continue statement
2295 should jump. */
2297 void
2298 expand_loop_continue_here ()
2300 if (output_bytecode)
2302 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
2303 return;
2305 do_pending_stack_adjust ();
2306 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2307 emit_label (loop_stack->data.loop.continue_label);
2310 /* End a loop. */
2312 static void
2313 bc_expand_end_loop ()
2315 struct nesting *thisloop = loop_stack;
2317 bc_emit_bytecode (jump);
2318 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2320 #ifdef DEBUG_PRINT_CODE
2321 fputc ('\n', stderr);
2322 #endif
2324 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
2325 POPSTACK (loop_stack);
2326 last_expr_type = 0;
2330 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2331 Pop the block off of loop_stack. */
2333 void
2334 expand_end_loop ()
2336 register rtx insn;
2337 register rtx start_label;
2338 rtx last_test_insn = 0;
2339 int num_insns = 0;
2341 if (output_bytecode)
2343 bc_expand_end_loop ();
2344 return;
2347 insn = get_last_insn ();
2348 start_label = loop_stack->data.loop.start_label;
2350 /* Mark the continue-point at the top of the loop if none elsewhere. */
2351 if (start_label == loop_stack->data.loop.continue_label)
2352 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2354 do_pending_stack_adjust ();
2356 /* If optimizing, perhaps reorder the loop. If the loop
2357 starts with a conditional exit, roll that to the end
2358 where it will optimize together with the jump back.
2360 We look for the last conditional branch to the exit that we encounter
2361 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2362 branch to the exit first, use it.
2364 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2365 because moving them is not valid. */
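/* Illustrative shape of the rewrite: a loop emitted as

     start:  if (!cond) goto end;
             body
             goto start;
     end:

   becomes

             goto start;
     newstart:
             body
     start:  if (!cond) goto end;
             goto newstart;
     end:

   so the exit test and the backward jump end up adjacent, where jump
   optimization can merge them into a single conditional branch.  */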
2367 if (optimize
2368 &&
2369 ! (GET_CODE (insn) == JUMP_INSN
2370 && GET_CODE (PATTERN (insn)) == SET
2371 && SET_DEST (PATTERN (insn)) == pc_rtx
2372 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2374 /* Scan insns from the top of the loop looking for a qualified
2375 conditional exit. */
2376 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2377 insn = NEXT_INSN (insn))
2379 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2380 break;
2382 if (GET_CODE (insn) == NOTE
2383 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2384 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2385 break;
2387 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2388 num_insns++;
2390 if (last_test_insn && num_insns > 30)
2391 break;
2393 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2394 && SET_DEST (PATTERN (insn)) == pc_rtx
2395 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2396 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2397 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2398 == loop_stack->data.loop.end_label)
2399 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2400 == loop_stack->data.loop.alt_end_label)))
2401 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2402 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2403 == loop_stack->data.loop.end_label)
2404 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2405 == loop_stack->data.loop.alt_end_label)))))
2406 last_test_insn = insn;
2408 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2409 && GET_CODE (PATTERN (insn)) == SET
2410 && SET_DEST (PATTERN (insn)) == pc_rtx
2411 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2412 && ((XEXP (SET_SRC (PATTERN (insn)), 0)
2413 == loop_stack->data.loop.end_label)
2414 || (XEXP (SET_SRC (PATTERN (insn)), 0)
2415 == loop_stack->data.loop.alt_end_label)))
2416 /* Include BARRIER. */
2417 last_test_insn = NEXT_INSN (insn);
2420 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2422 /* We found one. Move everything from there up
2423 to the end of the loop, and add a jump into the loop
2424 to jump to there. */
2425 register rtx newstart_label = gen_label_rtx ();
2426 register rtx start_move = start_label;
2428 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2429 then we want to move this note also. */
2430 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2431 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2432 == NOTE_INSN_LOOP_CONT))
2433 start_move = PREV_INSN (start_move);
2435 emit_label_after (newstart_label, PREV_INSN (start_move));
2436 reorder_insns (start_move, last_test_insn, get_last_insn ());
2437 emit_jump_insn_after (gen_jump (start_label),
2438 PREV_INSN (newstart_label));
2439 emit_barrier_after (PREV_INSN (newstart_label));
2440 start_label = newstart_label;
2444 emit_jump (start_label);
2445 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2446 emit_label (loop_stack->data.loop.end_label);
2448 POPSTACK (loop_stack);
2450 last_expr_type = 0;
2453 /* Generate a jump to the current loop's continue-point.
2454 This is usually the top of the loop, but may be specified
2455 explicitly elsewhere. If not currently inside a loop,
2456 return 0 and do nothing; caller will print an error message. */
2458 int
2459 expand_continue_loop (whichloop)
2460 struct nesting *whichloop;
2462 last_expr_type = 0;
2463 if (whichloop == 0)
2464 whichloop = loop_stack;
2465 if (whichloop == 0)
2466 return 0;
2467 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2468 NULL_RTX);
2469 return 1;
2472 /* Generate a jump to exit the current loop. If not currently inside a loop,
2473 return 0 and do nothing; caller will print an error message. */
2475 int
2476 expand_exit_loop (whichloop)
2477 struct nesting *whichloop;
2479 last_expr_type = 0;
2480 if (whichloop == 0)
2481 whichloop = loop_stack;
2482 if (whichloop == 0)
2483 return 0;
2484 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2485 return 1;
2488 /* Generate a conditional jump to exit the current loop if COND
2489 evaluates to zero. If not currently inside a loop,
2490 return 0 and do nothing; caller will print an error message. */
2492 int
2493 expand_exit_loop_if_false (whichloop, cond)
2494 struct nesting *whichloop;
2495 tree cond;
2497 last_expr_type = 0;
2498 if (whichloop == 0)
2499 whichloop = loop_stack;
2500 if (whichloop == 0)
2501 return 0;
2502 if (output_bytecode)
2504 bc_expand_expr (cond);
2505 bc_expand_goto_internal (xjumpifnot,
2506 BYTECODE_BC_LABEL (whichloop->exit_label),
2507 NULL_TREE);
2509 else
2511 /* In order to handle fixups, we actually create a conditional jump
2512 around an unconditional branch to exit the loop. If fixups are
2513 necessary, they go before the unconditional branch. */
2515 rtx label = gen_label_rtx ();
2516 rtx last_insn;
2518 do_jump (cond, NULL_RTX, label);
2519 last_insn = get_last_insn ();
2520 if (GET_CODE (last_insn) == CODE_LABEL)
2521 whichloop->data.loop.alt_end_label = last_insn;
2522 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2523 NULL_RTX);
2524 emit_label (label);
2527 return 1;
2530 /* Return non-zero if we should preserve sub-expressions as separate
2531 pseudos. We never do so if we aren't optimizing. We always do so
2532 if -fexpensive-optimizations.
2534 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2535 the loop may still be a small one. */
2537 int
2538 preserve_subexpressions_p ()
2540 rtx insn;
2542 if (flag_expensive_optimizations)
2543 return 1;
2545 if (optimize == 0 || loop_stack == 0)
2546 return 0;
2548 insn = get_last_insn_anywhere ();
2550 return (insn
2551 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2552 < n_non_fixed_regs * 3));
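/* E.g. with a hypothetical 16 allocatable registers, sub-expressions
   are preserved only within roughly the first 48 insns after the
   loop's start label; INSN_UIDs are handed out in emission order, so
   their difference approximates the number of insns emitted since the
   loop began.  */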
2556 /* Generate a jump to exit the current loop, conditional, binding contour
2557 or case statement. Not all such constructs are visible to this function,
2558 only those started with EXIT_FLAG nonzero. Individual languages use
2559 the EXIT_FLAG parameter to control which kinds of constructs you can
2560 exit this way.
2562 If not currently inside anything that can be exited,
2563 return 0 and do nothing; caller will print an error message. */
2565 int
2566 expand_exit_something ()
2568 struct nesting *n;
2569 last_expr_type = 0;
2570 for (n = nesting_stack; n; n = n->all)
2571 if (n->exit_label != 0)
2573 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2574 return 1;
2577 return 0;
2580 /* Generate RTL to return from the current function, with no value.
2581 (That is, we do not do anything about returning any value.) */
2583 void
2584 expand_null_return ()
2586 struct nesting *block = block_stack;
2587 rtx last_insn = 0;
2589 if (output_bytecode)
2591 bc_emit_instruction (ret);
2592 return;
2595 /* Does any pending block have cleanups? */
2597 while (block && block->data.block.cleanups == 0)
2598 block = block->next;
2600 /* If yes, use a goto to return, since that runs cleanups. */
2602 expand_null_return_1 (last_insn, block != 0);
2605 /* Generate RTL to return from the current function, with value VAL. */
2607 static void
2608 expand_value_return (val)
2609 rtx val;
2611 struct nesting *block = block_stack;
2612 rtx last_insn = get_last_insn ();
2613 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2615 /* Copy the value to the return location
2616 unless it's already there. */
2618 if (return_reg != val)
2620 #ifdef PROMOTE_FUNCTION_RETURN
2621 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2622 int unsignedp = TREE_UNSIGNED (type);
2623 enum machine_mode mode
2624 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2625 &unsignedp, 1);
2627 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2628 convert_move (return_reg, val, unsignedp);
2629 else
2630 #endif
2631 emit_move_insn (return_reg, val);
2633 if (GET_CODE (return_reg) == REG
2634 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2635 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2636 /* Handle calls that return values in multiple non-contiguous locations.
2637 The Irix 6 ABI has examples of this. */
2638 else if (GET_CODE (return_reg) == PARALLEL)
2640 int i;
2642 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2644 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2646 if (GET_CODE (x) == REG
2647 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2648 emit_insn (gen_rtx (USE, VOIDmode, x));
2652 /* Does any pending block have cleanups? */
2654 while (block && block->data.block.cleanups == 0)
2655 block = block->next;
2657 /* If yes, use a goto to return, since that runs cleanups.
2658 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2660 expand_null_return_1 (last_insn, block != 0);
2663 /* Output a return with no value. If LAST_INSN is nonzero,
2664 pretend that the return takes place after LAST_INSN.
2665 If USE_GOTO is nonzero then don't use a return instruction;
2666 go to the return label instead. This causes any cleanups
2667 of pending blocks to be executed normally. */
2669 static void
2670 expand_null_return_1 (last_insn, use_goto)
2671 rtx last_insn;
2672 int use_goto;
2674 rtx end_label = cleanup_label ? cleanup_label : return_label;
2676 clear_pending_stack_adjust ();
2677 do_pending_stack_adjust ();
2678 last_expr_type = 0;
2680 /* PCC-struct return always uses an epilogue. */
2681 if (current_function_returns_pcc_struct || use_goto)
2683 if (end_label == 0)
2684 end_label = return_label = gen_label_rtx ();
2685 expand_goto_internal (NULL_TREE, end_label, last_insn);
2686 return;
2689 /* Otherwise output a simple return-insn if one is available,
2690 unless it won't do the job. */
2691 #ifdef HAVE_return
2692 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2694 emit_jump_insn (gen_return ());
2695 emit_barrier ();
2696 return;
2698 #endif
2700 /* Otherwise jump to the epilogue. */
2701 expand_goto_internal (NULL_TREE, end_label, last_insn);
2704 /* Generate RTL to evaluate the expression RETVAL and return it
2705 from the current function. */
2707 void
2708 expand_return (retval)
2709 tree retval;
2711 /* If there are any cleanups to be performed, then they will
2712 be inserted following LAST_INSN. It is desirable
2713 that the last_insn, for such purposes, should be the
2714 last insn before computing the return value. Otherwise, cleanups
2715 which call functions can clobber the return value. */
2716 /* ??? rms: I think that is erroneous, because in C++ it would
2717 run destructors on variables that might be used in the subsequent
2718 computation of the return value. */
2719 rtx last_insn = 0;
2720 register rtx val = 0;
2721 register rtx op0;
2722 tree retval_rhs;
2723 int cleanups;
2724 struct nesting *block;
2726 /* Bytecode returns are quite simple, just leave the result on the
2727 arithmetic stack. */
2728 if (output_bytecode)
2730 bc_expand_expr (retval);
2731 bc_emit_instruction (ret);
2732 return;
2735 /* If function wants no value, give it none. */
2736 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2738 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2739 emit_queue ();
2740 expand_null_return ();
2741 return;
2744 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2745 /* This is not sufficient. We also need to watch for cleanups of the
2746 expression we are about to expand. Unfortunately, we cannot know
2747 if it has cleanups until we expand it, and we want to change how we
2748 expand it depending upon if we need cleanups. We can't win. */
2749 #if 0
2750 cleanups = any_pending_cleanups (1);
2751 #else
2752 cleanups = 1;
2753 #endif
2755 if (TREE_CODE (retval) == RESULT_DECL)
2756 retval_rhs = retval;
2757 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2758 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2759 retval_rhs = TREE_OPERAND (retval, 1);
2760 else if (TREE_TYPE (retval) == void_type_node)
2761 /* Recognize tail-recursive call to void function. */
2762 retval_rhs = retval;
2763 else
2764 retval_rhs = NULL_TREE;
2766 /* Only use `last_insn' if there are cleanups which must be run. */
2767 if (cleanups || cleanup_label != 0)
2768 last_insn = get_last_insn ();
2770 /* Distribute return down conditional expr if either of the sides
2771 may involve tail recursion (see test below). This enhances the number
2772 of tail recursions we see. Don't do this always since it can produce
2773 sub-optimal code in some cases and we distribute assignments into
2774 conditional expressions when it would help. */
2776 if (optimize && retval_rhs != 0
2777 && frame_offset == 0
2778 && TREE_CODE (retval_rhs) == COND_EXPR
2779 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2780 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2782 rtx label = gen_label_rtx ();
2783 tree expr;
2785 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2786 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2787 DECL_RESULT (current_function_decl),
2788 TREE_OPERAND (retval_rhs, 1));
2789 TREE_SIDE_EFFECTS (expr) = 1;
2790 expand_return (expr);
2791 emit_label (label);
2793 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2794 DECL_RESULT (current_function_decl),
2795 TREE_OPERAND (retval_rhs, 2));
2796 TREE_SIDE_EFFECTS (expr) = 1;
2797 expand_return (expr);
2798 return;
2801 /* For tail-recursive call to current function,
2802 just jump back to the beginning.
2803 It's unsafe if any auto variable in this function
2804 has its address taken; for simplicity,
2805 require stack frame to be empty. */
2806 if (optimize && retval_rhs != 0
2807 && frame_offset == 0
2808 && TREE_CODE (retval_rhs) == CALL_EXPR
2809 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2810 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2811 /* Finish checking validity, and if valid emit code
2812 to set the argument variables for the new call. */
2813 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2814 DECL_ARGUMENTS (current_function_decl)))
2816 if (tail_recursion_label == 0)
2818 tail_recursion_label = gen_label_rtx ();
2819 emit_label_after (tail_recursion_label,
2820 tail_recursion_reentry);
2822 emit_queue ();
2823 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2824 emit_barrier ();
2825 return;
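/* E.g. a hypothetical

     int f (int n, int acc) { return n ? f (n - 1, n * acc) : acc; }

   compiled with optimization and an empty frame: the recursive arm
   stores the new argument values via tail_recursion_args and jumps to
   tail_recursion_label instead of emitting a real call.  */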
2827 #ifdef HAVE_return
2828 /* This optimization is safe if there are local cleanups
2829 because expand_null_return takes care of them.
2830 ??? I think it should also be safe when there is a cleanup label,
2831 because expand_null_return takes care of them, too.
2832 Any reason why not? */
2833 if (HAVE_return && cleanup_label == 0
2834 && ! current_function_returns_pcc_struct
2835 && BRANCH_COST <= 1)
2837 /* If this is return x == y; then generate
2838 if (x == y) return 1; else return 0;
2839 if we can do it with explicit return insns and branches are cheap,
2840 but not if we have the corresponding scc insn. */
2841 int has_scc = 0;
2842 if (retval_rhs)
2843 switch (TREE_CODE (retval_rhs))
2845 case EQ_EXPR:
2846 #ifdef HAVE_seq
2847 has_scc = HAVE_seq;
2848 #endif
2849 case NE_EXPR:
2850 #ifdef HAVE_sne
2851 has_scc = HAVE_sne;
2852 #endif
2853 case GT_EXPR:
2854 #ifdef HAVE_sgt
2855 has_scc = HAVE_sgt;
2856 #endif
2857 case GE_EXPR:
2858 #ifdef HAVE_sge
2859 has_scc = HAVE_sge;
2860 #endif
2861 case LT_EXPR:
2862 #ifdef HAVE_slt
2863 has_scc = HAVE_slt;
2864 #endif
2865 case LE_EXPR:
2866 #ifdef HAVE_sle
2867 has_scc = HAVE_sle;
2868 #endif
2869 case TRUTH_ANDIF_EXPR:
2870 case TRUTH_ORIF_EXPR:
2871 case TRUTH_AND_EXPR:
2872 case TRUTH_OR_EXPR:
2873 case TRUTH_NOT_EXPR:
2874 case TRUTH_XOR_EXPR:
2875 if (! has_scc)
2877 op0 = gen_label_rtx ();
2878 jumpifnot (retval_rhs, op0);
2879 expand_value_return (const1_rtx);
2880 emit_label (op0);
2881 expand_value_return (const0_rtx);
2882 return;
2884 break;
2886 default:
2887 break;
2890 #endif /* HAVE_return */
2892 /* If the result is an aggregate that is being returned in one (or more)
2893 registers, load the registers here. The compiler currently can't handle
2894 copying a BLKmode value into registers. We could put this code in a
2895 more general area (for use by everyone instead of just function
2896 call/return), but until this feature is generally usable it is kept here
2897 (and in expand_call). The value must go into a pseudo in case there
2898 are cleanups that will clobber the real return register. */
2900 if (retval_rhs != 0
2901 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2902 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2904 int i, bitpos, xbitpos;
2905 int big_endian_correction = 0;
2906 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2907 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2908 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),BITS_PER_WORD);
2909 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2910 rtx result_reg, src, dst;
2911 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2912 enum machine_mode tmpmode, result_reg_mode;
2914 /* Structures whose size is not a multiple of a word are aligned
2915 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2916 machine, this means we must skip the empty high order bytes when
2917 calculating the bit offset. */
2918 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2919 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2920 * BITS_PER_UNIT));
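/* Worked example (hypothetical 32-bit big-endian target with
   UNITS_PER_WORD == 4): a 6-byte structure gives
   big_endian_correction = 32 - (6 % 4) * 8 = 16, so the destination
   bit offset starts at 16 rather than 0, skipping the empty bytes as
   described above.  */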
2922 /* Copy the structure BITSIZE bits at a time. */
2923 for (bitpos = 0, xbitpos = big_endian_correction;
2924 bitpos < bytes * BITS_PER_UNIT;
2925 bitpos += bitsize, xbitpos += bitsize)
2927 /* We need a new destination pseudo each time xbitpos is
2928 on a word boundary and when xbitpos == big_endian_correction
2929 (the first time through). */
2930 if (xbitpos % BITS_PER_WORD == 0
2931 || xbitpos == big_endian_correction)
2933 /* Generate an appropriate register. */
2934 dst = gen_reg_rtx (word_mode);
2935 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2937 /* Clobber the destination before we move anything into it. */
2938 emit_insn (gen_rtx (CLOBBER, VOIDmode, dst));
2941 /* We need a new source operand each time bitpos is on a word
2942 boundary. */
2943 if (bitpos % BITS_PER_WORD == 0)
2944 src = operand_subword_force (result_val,
2945 bitpos / BITS_PER_WORD,
2946 BLKmode);
2948 /* Use bitpos for the source extraction (left justified) and
2949 xbitpos for the destination store (right justified). */
2950 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2951 extract_bit_field (src, bitsize,
2952 bitpos % BITS_PER_WORD, 1,
2953 NULL_RTX, word_mode,
2954 word_mode,
2955 bitsize / BITS_PER_UNIT,
2956 BITS_PER_WORD),
2957 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2960 /* Find the smallest integer mode large enough to hold the
2961 entire structure and use that mode instead of BLKmode
2962 on the USE insn for the return register. */
2963 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2964 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2965 tmpmode != MAX_MACHINE_MODE;
2966 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2968 /* Have we found a large enough mode? */
2969 if (GET_MODE_SIZE (tmpmode) >= bytes)
2970 break;
2973 /* No suitable mode found. */
2974 if (tmpmode == MAX_MACHINE_MODE)
2975 abort ();
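/* Continuing the hypothetical 6-byte example: on a typical 32-bit
   target this loop rejects SImode (4 bytes) and settles on DImode
   (8 bytes), the narrowest MODE_INT mode that covers the whole
   structure.  */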
2977 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2979 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2980 result_reg_mode = word_mode;
2981 else
2982 result_reg_mode = tmpmode;
2983 result_reg = gen_reg_rtx (result_reg_mode);
2985 emit_queue ();
2986 for (i = 0; i < n_regs; i++)
2987 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2988 result_pseudos[i]);
2990 if (tmpmode != result_reg_mode)
2991 result_reg = gen_lowpart (tmpmode, result_reg);
2993 expand_value_return (result_reg);
2995 else if (cleanups
2996 && retval_rhs != 0
2997 && TREE_TYPE (retval_rhs) != void_type_node
2998 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
3000 /* Calculate the return value into a pseudo reg. */
3001 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
3002 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3003 val = force_not_mem (val);
3004 emit_queue ();
3005 /* Return the calculated value, doing cleanups first. */
3006 expand_value_return (val);
3008 else
3010 /* No cleanups or no hard reg used;
3011 calculate value into hard return reg. */
3012 expand_expr (retval, const0_rtx, VOIDmode, 0);
3013 emit_queue ();
3014 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
3018 /* Return 1 if the end of the generated RTX is not a barrier.
3019 This means code already compiled can drop through. */
3021 int
3022 drop_through_at_end_p ()
3024 rtx insn = get_last_insn ();
3025 while (insn && GET_CODE (insn) == NOTE)
3026 insn = PREV_INSN (insn);
3027 return insn && GET_CODE (insn) != BARRIER;
3030 /* Emit code to alter this function's formal parms for a tail-recursive call.
3031 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3032 FORMALS is the chain of decls of formals.
3033 Return 1 if this can be done;
3034 otherwise return 0 and do not emit any code. */
3036 static int
3037 tail_recursion_args (actuals, formals)
3038 tree actuals, formals;
3040 register tree a = actuals, f = formals;
3041 register int i;
3042 register rtx *argvec;
3044 /* Check that number and types of actuals are compatible
3045 with the formals. This is not always true in valid C code.
3046 Also check that no formal needs to be addressable
3047 and that all formals are scalars. */
3049 /* Also count the args. */
3051 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3053 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3054 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3055 return 0;
3056 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3057 return 0;
3059 if (a != 0 || f != 0)
3060 return 0;
3062 /* Compute all the actuals. */
3064 argvec = (rtx *) alloca (i * sizeof (rtx));
3066 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3067 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3069 /* Find which actual values refer to current values of previous formals.
3070 Copy each of them now, before any formal is changed. */
3072 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3074 int copy = 0;
3075 register int j;
3076 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3077 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3078 { copy = 1; break; }
3079 if (copy)
3080 argvec[i] = copy_to_reg (argvec[i]);
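/* Hypothetical example: inside f (x, y), the tail call f (y, x)
   computes argvec = {rtl of y, rtl of x}.  For i == 1 the scan finds
   that argvec[1] mentions formal x (j == 0), so it is copied to a
   fresh register here; otherwise the later store of y into x would
   corrupt the value about to be stored into y.  */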
3083 /* Store the values of the actuals into the formals. */
3085 for (f = formals, a = actuals, i = 0; f;
3086 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3088 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3089 emit_move_insn (DECL_RTL (f), argvec[i]);
3090 else
3091 convert_move (DECL_RTL (f), argvec[i],
3092 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3095 free_temp_slots ();
3096 return 1;
3099 /* Generate the RTL code for entering a binding contour.
3100 The variables are declared one by one, by calls to `expand_decl'.
3102 EXIT_FLAG is nonzero if this construct should be visible to
3103 `exit_something'. */
3105 void
3106 expand_start_bindings (exit_flag)
3107 int exit_flag;
3109 struct nesting *thisblock = ALLOC_NESTING ();
3110 rtx note = output_bytecode ? 0 : emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3112 /* Make an entry on block_stack for the block we are entering. */
3114 thisblock->next = block_stack;
3115 thisblock->all = nesting_stack;
3116 thisblock->depth = ++nesting_depth;
3117 thisblock->data.block.stack_level = 0;
3118 thisblock->data.block.cleanups = 0;
3119 thisblock->data.block.function_call_count = 0;
3120 thisblock->data.block.exception_region = 0;
3121 thisblock->data.block.target_temp_slot_level = target_temp_slot_level;
3123 thisblock->data.block.conditional_code = 0;
3124 thisblock->data.block.last_unconditional_cleanup = note;
3125 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3127 if (block_stack
3128 && !(block_stack->data.block.cleanups == NULL_TREE
3129 && block_stack->data.block.outer_cleanups == NULL_TREE))
3130 thisblock->data.block.outer_cleanups
3131 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3132 block_stack->data.block.outer_cleanups);
3133 else
3134 thisblock->data.block.outer_cleanups = 0;
3135 thisblock->data.block.label_chain = 0;
3136 thisblock->data.block.innermost_stack_block = stack_block_stack;
3137 thisblock->data.block.first_insn = note;
3138 thisblock->data.block.block_start_count = ++block_start_count;
3139 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3140 block_stack = thisblock;
3141 nesting_stack = thisblock;
3143 if (!output_bytecode)
3145 /* Make a new level for allocating stack slots. */
3146 push_temp_slots ();
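/* Sketch of the protocol for a hypothetical block `{ int x; ... }':

     expand_start_bindings (0);
     expand_decl (decl_for_x);   ... expand the statements ...
     expand_end_bindings (decls, 1, 0);

   Nested contours simply push further entries on block_stack.  */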
3150 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3151 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3152 expand_expr are made. After we end the region, we know that all
3153 space for all temporaries that were created by TARGET_EXPRs will be
3154 destroyed and their space freed for reuse. */
3156 void
3157 expand_start_target_temps ()
3159 /* This is so that even if the result is preserved, the space
3160 allocated will be freed, as we know that it is no longer in use. */
3161 push_temp_slots ();
3163 /* Start a new binding layer that will keep track of all cleanup
3164 actions to be performed. */
3165 expand_start_bindings (0);
3167 target_temp_slot_level = temp_slot_level;
3170 void
3171 expand_end_target_temps ()
3173 expand_end_bindings (NULL_TREE, 0, 0);
3175 /* This is so that even if the result is preserved, the space
3176 allocated will be freed, as we know that it is no longer in use. */
3177 pop_temp_slots ();
3180 /* Mark top block of block_stack as an implicit binding for an
3181 exception region. This is used to prevent infinite recursion when
3182 ending a binding with expand_end_bindings. It is only ever called
3183 by expand_eh_region_start, as that is the only way to create a
3184 block stack for an exception region. */
3186 void
3187 mark_block_as_eh_region ()
3189 block_stack->data.block.exception_region = 1;
3190 if (block_stack->next
3191 && block_stack->next->data.block.conditional_code)
3193 block_stack->data.block.conditional_code
3194 = block_stack->next->data.block.conditional_code;
3195 block_stack->data.block.last_unconditional_cleanup
3196 = block_stack->next->data.block.last_unconditional_cleanup;
3197 block_stack->data.block.cleanup_ptr
3198 = block_stack->next->data.block.cleanup_ptr;
3202 /* True if we are currently emitting insns in an area of output code
3203 that is controlled by a conditional expression. This is used by
3204 the cleanup handling code to generate conditional cleanup actions. */
3206 int
3207 conditional_context ()
3209 return block_stack && block_stack->data.block.conditional_code;
3212 /* Mark top block of block_stack as not for an implicit binding for an
3213 exception region. This is only ever done by expand_eh_region_end
3214 to let expand_end_bindings know that it is being called explicitly
3215 to end the binding layer for just the binding layer associated with
3216 the exception region, otherwise expand_end_bindings would try and
3217 end all implicit binding layers for exceptions regions, and then
3218 one normal binding layer. */
3220 void
3221 mark_block_as_not_eh_region ()
3223 block_stack->data.block.exception_region = 0;
3226 /* True if the top block of block_stack was marked as for an exception
3227 region by mark_block_as_eh_region. */
3229 int
3230 is_eh_region ()
3232 return block_stack && block_stack->data.block.exception_region;
3235 /* Given a pointer to a BLOCK node, save a pointer to the most recently
3236 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3237 BLOCK node. */
3239 void
3240 remember_end_note (block)
3241 register tree block;
3243 BLOCK_END_NOTE (block) = last_block_end_note;
3244 last_block_end_note = NULL_RTX;
3247 /* Generate RTL code to terminate a binding contour.
3248 VARS is the chain of VAR_DECL nodes
3249 for the variables bound in this contour.
3250 MARK_ENDS is nonzero if we should put a note at the beginning
3251 and end of this binding contour.
3253 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3254 (That is true automatically if the contour has a saved stack level.) */
3256 void
3257 expand_end_bindings (vars, mark_ends, dont_jump_in)
3258 tree vars;
3259 int mark_ends;
3260 int dont_jump_in;
3262 register struct nesting *thisblock;
3263 register tree decl;
3265 while (block_stack->data.block.exception_region)
3267 /* Because we don't need or want a new temporary level and
3268 because we didn't create one in expand_eh_region_start,
3269 create a fake one now to avoid removing one in
3270 expand_end_bindings. */
3271 push_temp_slots ();
3273 block_stack->data.block.exception_region = 0;
3275 expand_end_bindings (NULL_TREE, 0, 0);
3278 if (output_bytecode)
3280 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
3281 return;
3284 /* Since expand_eh_region_start does an expand_start_bindings, we
3285 have to first end all the bindings that were created by
3286 expand_eh_region_start. */
3288 thisblock = block_stack;
3290 if (warn_unused)
3291 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3292 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3293 && ! DECL_IN_SYSTEM_HEADER (decl)
3294 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3295 warning_with_decl (decl, "unused variable `%s'");
3297 if (thisblock->exit_label)
3299 do_pending_stack_adjust ();
3300 emit_label (thisblock->exit_label);
3303 /* If necessary, make a handler for nonlocal gotos taking
3304 place in the function calls in this block. */
3305 if (function_call_count != thisblock->data.block.function_call_count
3306 && nonlocal_labels
3307 /* Make handler for outermost block
3308 if there were any nonlocal gotos to this function. */
3309 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3310 /* Make handler for inner block if it has something
3311 special to do when you jump out of it. */
3312 : (thisblock->data.block.cleanups != 0
3313 || thisblock->data.block.stack_level != 0)))
3315 tree link;
3316 rtx afterward = gen_label_rtx ();
3317 rtx handler_label = gen_label_rtx ();
3318 rtx save_receiver = gen_reg_rtx (Pmode);
3319 rtx insns;
3321 /* Don't let jump_optimize delete the handler. */
3322 LABEL_PRESERVE_P (handler_label) = 1;
3324 /* Record the handler address in the stack slot for that purpose,
3325 during this block, saving and restoring the outer value. */
3326 if (thisblock->next != 0)
3328 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
3330 start_sequence ();
3331 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
3332 insns = get_insns ();
3333 end_sequence ();
3334 emit_insns_before (insns, thisblock->data.block.first_insn);
3337 start_sequence ();
3338 emit_move_insn (nonlocal_goto_handler_slot,
3339 gen_rtx (LABEL_REF, Pmode, handler_label));
3340 insns = get_insns ();
3341 end_sequence ();
3342 emit_insns_before (insns, thisblock->data.block.first_insn);
3344 /* Jump around the handler; it runs only when specially invoked. */
3345 emit_jump (afterward);
3346 emit_label (handler_label);
3348 #ifdef HAVE_nonlocal_goto
3349 if (! HAVE_nonlocal_goto)
3350 #endif
3351 /* First adjust our frame pointer to its actual value. It was
3352 previously set to the start of the virtual area corresponding to
3353 the stacked variables when we branched here and now needs to be
3354 adjusted to the actual hardware fp value.
3356 Assignments to virtual registers are converted by
3357 instantiate_virtual_regs into the corresponding assignment
3358 to the underlying register (fp in this case) that makes
3359 the original assignment true.
3360 So the following insn will actually be
3361 decrementing fp by STARTING_FRAME_OFFSET. */
3362 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3364 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3365 if (fixed_regs[ARG_POINTER_REGNUM])
3367 #ifdef ELIMINABLE_REGS
3368 /* If the argument pointer can be eliminated in favor of the
3369 frame pointer, we don't need to restore it. We assume here
3370 that if such an elimination is present, it can always be used.
3371 This is the case on all known machines; if we don't make this
3372 assumption, we do unnecessary saving on many machines. */
3373 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3374 int i;
3376 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3377 if (elim_regs[i].from == ARG_POINTER_REGNUM
3378 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3379 break;
3381 if (i == sizeof elim_regs / sizeof elim_regs [0])
3382 #endif
3384 /* Now restore our arg pointer from the address at which it
3385 was saved in our stack frame.
3386 If there hasn't been space allocated for it yet, make
3387 some now. */
3388 if (arg_pointer_save_area == 0)
3389 arg_pointer_save_area
3390 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3391 emit_move_insn (virtual_incoming_args_rtx,
3392 /* We need a pseudo here, or else
3393 instantiate_virtual_regs_1 complains. */
3394 copy_to_reg (arg_pointer_save_area));
3397 #endif
3399 #ifdef HAVE_nonlocal_goto_receiver
3400 if (HAVE_nonlocal_goto_receiver)
3401 emit_insn (gen_nonlocal_goto_receiver ());
3402 #endif
3404 /* The handler expects the desired label address in the static chain
3405 register. It tests the address and does an appropriate jump
3406 to whatever label is desired. */
3407 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
3408 /* Skip any labels we shouldn't be able to jump to from here. */
3409 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3411 rtx not_this = gen_label_rtx ();
3412 rtx this = gen_label_rtx ();
3413 do_jump_if_equal (static_chain_rtx,
3414 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
3415 this, 0);
3416 emit_jump (not_this);
3417 emit_label (this);
3418 expand_goto (TREE_VALUE (link));
3419 emit_label (not_this);
3421 /* If label is not recognized, abort. */
3422 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
3423 VOIDmode, 0);
3424 emit_barrier ();
3425 emit_label (afterward);
3428 /* Don't allow jumping into a block that has a stack level.
3429 Cleanups are allowed, though. */
3430 if (dont_jump_in
3431 || thisblock->data.block.stack_level != 0)
3433 struct label_chain *chain;
3435 /* Any labels in this block are no longer valid to go to.
3436 Mark them to cause an error message. */
3437 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3439 DECL_TOO_LATE (chain->label) = 1;
3440 /* If any goto without a fixup came to this label,
3441 that must be an error, because gotos without fixups
3442 come from outside all saved stack-levels. */
3443 if (TREE_ADDRESSABLE (chain->label))
3444 error_with_decl (chain->label,
3445 "label `%s' used before containing binding contour");
3449 /* Restore stack level in effect before the block
3450 (only if variable-size objects allocated). */
3451 /* Perform any cleanups associated with the block. */
3453 if (thisblock->data.block.stack_level != 0
3454 || thisblock->data.block.cleanups != 0)
3456 /* Only clean up here if this point can actually be reached. */
3457 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3459 /* Don't let cleanups affect ({...}) constructs. */
3460 int old_expr_stmts_for_value = expr_stmts_for_value;
3461 rtx old_last_expr_value = last_expr_value;
3462 tree old_last_expr_type = last_expr_type;
3463 expr_stmts_for_value = 0;
3465 /* Do the cleanups. */
3466 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3467 if (reachable)
3468 do_pending_stack_adjust ();
3470 expr_stmts_for_value = old_expr_stmts_for_value;
3471 last_expr_value = old_last_expr_value;
3472 last_expr_type = old_last_expr_type;
3474 /* Restore the stack level. */
3476 if (reachable && thisblock->data.block.stack_level != 0)
3478 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3479 thisblock->data.block.stack_level, NULL_RTX);
3480 if (nonlocal_goto_handler_slot != 0)
3481 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3482 NULL_RTX);
3485 /* Any gotos out of this block must also do these things.
3486 Also report any gotos with fixups that came to labels in this
3487 level. */
3488 fixup_gotos (thisblock,
3489 thisblock->data.block.stack_level,
3490 thisblock->data.block.cleanups,
3491 thisblock->data.block.first_insn,
3492 dont_jump_in);
3495 /* Mark the beginning and end of the scope if requested.
3496 We do this now, after running cleanups on the variables
3497 just going out of scope, so they are in scope for their cleanups. */
3499 if (mark_ends)
3500 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3501 else
3502 /* Get rid of the beginning-mark if we don't make an end-mark. */
3503 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3505 /* If doing stupid register allocation, make sure lives of all
3506 register variables declared here extend thru end of scope. */
3508 if (obey_regdecls)
3509 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3511 rtx rtl = DECL_RTL (decl);
3512 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3513 use_variable (rtl);
3516 /* Restore the temporary level of TARGET_EXPRs. */
3517 target_temp_slot_level = thisblock->data.block.target_temp_slot_level;
3519 /* Restore block_stack level for containing block. */
3521 stack_block_stack = thisblock->data.block.innermost_stack_block;
3522 POPSTACK (block_stack);
3524 /* Pop the stack slot nesting and free any slots at this level. */
3525 pop_temp_slots ();
3529 /* End a binding contour.
3530 VARS is the chain of VAR_DECL nodes for the variables bound
3531 in this contour. MARK_ENDS is nonzero if we should put a note
3532 at the beginning and end of this binding contour.
3533 DONT_JUMP_IN is nonzero if it is not valid to jump into this
3534 contour. */
3536 static void
3537 bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3538 tree vars;
3539 int mark_ends;
3540 int dont_jump_in;
3542 struct nesting *thisbind = nesting_stack;
3543 tree decl;
3545 if (warn_unused)
3546 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3547 if (! TREE_USED (TREE_VALUE (decl)) && TREE_CODE (TREE_VALUE (decl)) == VAR_DECL)
3548 warning_with_decl (TREE_VALUE (decl), "unused variable `%s'");
3550 if (thisbind->exit_label)
3551 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));
3553 /* Pop block/bindings off stack */
3554 POPSTACK (block_stack);
3557 /* Generate RTL for the automatic variable declaration DECL.
3558 (Other kinds of declarations are simply ignored if seen here.) */
3560 void
3561 expand_decl (decl)
3562 register tree decl;
3564 struct nesting *thisblock = block_stack;
3565 tree type;
3567 if (output_bytecode)
3569 bc_expand_decl (decl, 0);
3570 return;
3573 type = TREE_TYPE (decl);
3575 /* Only automatic variables need any expansion done.
3576 Static and external variables, and external functions,
3577 will be handled by `assemble_variable' (called from finish_decl).
3578 TYPE_DECL and CONST_DECL require nothing.
3579 PARM_DECLs are handled in `assign_parms'. */
3581 if (TREE_CODE (decl) != VAR_DECL)
3582 return;
3583 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3584 return;
3586 /* Create the RTL representation for the variable. */
3588 if (type == error_mark_node)
3589 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3590 else if (DECL_SIZE (decl) == 0)
3591 /* Variable with incomplete type. */
3593 if (DECL_INITIAL (decl) == 0)
3594 /* Error message was already done; now avoid a crash. */
3595 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3596 else
3597 /* An initializer is going to decide the size of this array.
3598 Until we know the size, represent its address with a reg. */
3599 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3600 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
3602 else if (DECL_MODE (decl) != BLKmode
3603 /* If -ffloat-store, don't put explicit float vars
3604 into regs. */
3605 && !(flag_float_store
3606 && TREE_CODE (type) == REAL_TYPE)
3607 && ! TREE_THIS_VOLATILE (decl)
3608 && ! TREE_ADDRESSABLE (decl)
3609 && (DECL_REGISTER (decl) || ! obey_regdecls))
3611 /* Automatic variable that can go in a register. */
3612 int unsignedp = TREE_UNSIGNED (type);
3613 enum machine_mode reg_mode
3614 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3616 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3617 mark_user_reg (DECL_RTL (decl));
3619 if (TREE_CODE (type) == POINTER_TYPE)
3620 mark_reg_pointer (DECL_RTL (decl),
3621 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3622 / BITS_PER_UNIT));
3625 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3626 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3627 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3628 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3629 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
3631 /* Variable of fixed size that goes on the stack. */
3632 rtx oldaddr = 0;
3633 rtx addr;
3635 /* If we previously made RTL for this decl, it must be an array
3636 whose size was determined by the initializer.
3637 The old address was a register; set that register now
3638 to the proper address. */
3639 if (DECL_RTL (decl) != 0)
3641 if (GET_CODE (DECL_RTL (decl)) != MEM
3642 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3643 abort ();
3644 oldaddr = XEXP (DECL_RTL (decl), 0);
3647 DECL_RTL (decl)
3648 = assign_stack_temp (DECL_MODE (decl),
3649 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3650 + BITS_PER_UNIT - 1)
3651 / BITS_PER_UNIT),
3653 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3655 /* Set alignment we actually gave this decl. */
3656 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3657 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3659 if (oldaddr)
3661 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3662 if (addr != oldaddr)
3663 emit_move_insn (oldaddr, addr);
3666 /* If this is a memory ref that contains aggregate components,
3667    mark it as such for the CSE and loop optimizers.  */
3668 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3669 #if 0
3670 /* If this is in memory because of -ffloat-store,
3671 set the volatile bit, to prevent optimizations from
3672 undoing the effects. */
3673 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3674 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3675 #endif
3677 else
3678 /* Dynamic-size object: must push space on the stack. */
3680 rtx address, size;
3682    /* Record the stack pointer on entry to the block, if we have
3683 not already done so. */
3684 if (thisblock->data.block.stack_level == 0)
3686 do_pending_stack_adjust ();
3687 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3688 &thisblock->data.block.stack_level,
3689 thisblock->data.block.first_insn);
3690 stack_block_stack = thisblock;
3693 /* Compute the variable's size, in bytes. */
3694 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3695 DECL_SIZE (decl),
3696 size_int (BITS_PER_UNIT)),
3697 NULL_RTX, VOIDmode, 0);
3698 free_temp_slots ();
3700 /* Allocate space on the stack for the variable. Note that
3701 DECL_ALIGN says how the variable is to be aligned and we
3702 cannot use it to conclude anything about the alignment of
3703 the size. */
3704 address = allocate_dynamic_stack_space (size, NULL_RTX,
3705 TYPE_ALIGN (TREE_TYPE (decl)));
3707    /* Reference the variable indirectly through that rtx.  */
3708 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
3710 /* If this is a memory ref that contains aggregate components,
3711    mark it as such for the CSE and loop optimizers.  */
3712 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3714 /* Indicate the alignment we actually gave this variable. */
3715 #ifdef STACK_BOUNDARY
3716 DECL_ALIGN (decl) = STACK_BOUNDARY;
3717 #else
3718 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3719 #endif
3722 if (TREE_THIS_VOLATILE (decl))
3723 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3724 #if 0 /* A variable is not necessarily unchanging
3725 just because it is const. RTX_UNCHANGING_P
3726 means no change in the function,
3727 not merely no change in the variable's scope.
3728 It is correct to set RTX_UNCHANGING_P if the variable's scope
3729 is the whole function. There's no convenient way to test that. */
3730 if (TREE_READONLY (decl))
3731 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3732 #endif
3734 /* If doing stupid register allocation, make sure life of any
3735 register variable starts here, at the start of its scope. */
3737 if (obey_regdecls)
3738 use_variable (DECL_RTL (decl));
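/* Illustrative summary of the three paths above (added commentary,
   not original source), for typical declarations:

       int i;             -- non-BLK mode, not volatile or addressable:
                             DECL_RTL becomes a pseudo REG
       struct s big;      -- fixed INTEGER_CST size:
                             DECL_RTL is a MEM in a stack slot
       char buf[n];       -- size not a constant:
                             space is pushed at run time with
                             allocate_dynamic_stack_space

   so DECL_RTL always ends up as either a REG or a MEM that
   expand_expr can use directly.  */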
3742 /* Generate code for the automatic variable declaration DECL. For
3743 most variables this just means we give it a stack offset. The
3744 compiler sometimes emits cleanups without variables and we will
3745 have to deal with those too. */
3747 static void
3748 bc_expand_decl (decl, cleanup)
3749 tree decl;
3750 tree cleanup;
3752 tree type;
3754 if (!decl)
3756 /* A cleanup with no variable. */
3757 if (!cleanup)
3758 abort ();
3760 return;
3763 /* Only auto variables need any work. */
3764 if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3765 return;
3767 type = TREE_TYPE (decl);
3769 if (type == error_mark_node)
3770 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3772 else if (DECL_SIZE (decl) == 0)
3774 /* Variable with incomplete type. The stack offset herein will be
3775 fixed later in expand_decl_init. */
3776 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3778 else if (TREE_CONSTANT (DECL_SIZE (decl)))
3780 DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
3781 DECL_ALIGN (decl));
3783 else
3784 DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
3787 /* Emit code to perform the initialization of a declaration DECL. */
3789 void
3790 expand_decl_init (decl)
3791 tree decl;
3793 int was_used = TREE_USED (decl);
3795 if (output_bytecode)
3797 bc_expand_decl_init (decl);
3798 return;
3801 /* If this is a CONST_DECL, we don't have to generate any code, but
3802 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3803 to be set while in the obstack containing the constant. If we don't
3804 do this, we can lose if we have functions nested three deep and the middle
3805 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3806 the innermost function is the first to expand that STRING_CST. */
3807 if (TREE_CODE (decl) == CONST_DECL)
3809 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3810 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3811 EXPAND_INITIALIZER);
3812 return;
3815 if (TREE_STATIC (decl))
3816 return;
3818 /* Compute and store the initial value now. */
3820 if (DECL_INITIAL (decl) == error_mark_node)
3822 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3823 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3824 || code == POINTER_TYPE)
3825 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3826 0, 0);
3827 emit_queue ();
3829 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3831 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3832 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3833 emit_queue ();
3836 /* Don't let the initialization count as "using" the variable. */
3837 TREE_USED (decl) = was_used;
3839 /* Free any temporaries we made while initializing the decl. */
3840 preserve_temp_slots (NULL_RTX);
3841 free_temp_slots ();
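/* Illustrative sketch (added commentary, not original source): for a
   declaration with an initializer such as

       int x = f ();

   the front end calls expand_decl (x) and then expand_decl_init (x),
   which reduces to roughly

       emit_line_note (file, line);
       expand_assignment (x, DECL_INITIAL (x), 0, 0);
       emit_queue ();

   with TREE_USED restored afterwards so that the initialization alone
   does not suppress an "unused variable" warning.  */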
3844 /* Expand initialization for variable-sized types. Allocate array
3845 using newlocalSI and set local variable, which is a pointer to the
3846 storage. */
3848 static void
3849 bc_expand_variable_local_init (decl)
3850 tree decl;
3852 /* Evaluate size expression and coerce to SI */
3853 bc_expand_expr (DECL_SIZE (decl));
3855    /* Type sizes are presumably always of TREE_CODE INTEGER_CST here,
3856       so no coercion should be necessary.  */
3858 /* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
3859 TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */
3861 /* Emit code to allocate array */
3862 bc_emit_instruction (newlocalSI);
3864 /* Store array pointer in local variable. This is the only instance
3865 where we actually want the address of the pointer to the
3866 variable-size block, rather than the pointer itself. We avoid
3867 using expand_address() since that would cause the pointer to be
3868 pushed rather than its address. Hence the hard-coded reference;
3869 notice also that the variable is always local (no global
3870 variable-size type variables). */
3872 bc_load_localaddr (DECL_RTL (decl));
3873 bc_emit_instruction (storeP);
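/* Illustrative reading of the bytecode emitted above (added
   commentary, not original source; the exact operand conventions of
   newlocalSI are assumed):

       <code for DECL_SIZE (decl)>     -- push the size expression
       newlocalSI                      -- allocate block, push pointer
       <local address of decl>         -- push address of the local slot
       storeP                          -- store the pointer into the slot

   leaving the local variable holding a pointer to the allocated
   storage, as the comment above describes.  */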
3877 /* Emit code to initialize a declaration. */
3879 static void
3880 bc_expand_decl_init (decl)
3881 tree decl;
3883 int org_stack_depth;
3885    /* Static initializers are handled elsewhere.  */
3887 if (TREE_STATIC (decl))
3888 return;
3890    /* Remember the original stack depth.  */
3891 org_stack_depth = stack_depth;
3893 /* If the type is variable-size, we first create its space (we ASSUME
3894 it CAN'T be static). We do this regardless of whether there's an
3895 initializer assignment or not. */
3897 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
3898 bc_expand_variable_local_init (decl);
3900 /* Expand initializer assignment */
3901 if (DECL_INITIAL (decl) == error_mark_node)
3903 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3905 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3906 || code == POINTER_TYPE)
3908 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3910 else if (DECL_INITIAL (decl))
3911 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3913 /* Restore stack depth */
3914 if (org_stack_depth > stack_depth)
3915 abort ();
3917 bc_adjust_stack (stack_depth - org_stack_depth);
3921 /* CLEANUP is an expression to be executed at exit from this binding contour;
3922 for example, in C++, it might call the destructor for this variable.
3924 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3925 CLEANUP multiple times, and have the correct semantics. This
3926    happens in exception handling and for gotos, returns, and breaks that
3927 leave the current scope.
3929 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3930 that is not associated with any particular variable. */
3933 expand_decl_cleanup (decl, cleanup)
3934 tree decl, cleanup;
3936 struct nesting *thisblock = block_stack;
3938 /* Error if we are not in any block. */
3939 if (thisblock == 0)
3940 return 0;
3942 /* Record the cleanup if there is one. */
3944 if (cleanup != 0)
3946 tree t;
3947 rtx seq;
3948 tree *cleanups = &thisblock->data.block.cleanups;
3949 int cond_context = conditional_context ();
3951 if (cond_context)
3953 rtx flag = gen_reg_rtx (word_mode);
3954 rtx set_flag_0;
3955 tree cond;
3957 start_sequence ();
3958 emit_move_insn (flag, const0_rtx);
3959 set_flag_0 = get_insns ();
3960 end_sequence ();
3962 thisblock->data.block.last_unconditional_cleanup
3963 = emit_insns_after (set_flag_0,
3964 thisblock->data.block.last_unconditional_cleanup);
3966 emit_move_insn (flag, const1_rtx);
3968 /* All cleanups must be on the function_obstack. */
3969 push_obstacks_nochange ();
3970 resume_temporary_allocation ();
3972 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3973 DECL_RTL (cond) = flag;
3975 /* Conditionalize the cleanup. */
3976 cleanup = build (COND_EXPR, void_type_node,
3977 truthvalue_conversion (cond),
3978 cleanup, integer_zero_node);
3979 cleanup = fold (cleanup);
3981 pop_obstacks ();
3983 cleanups = thisblock->data.block.cleanup_ptr;
3986 /* All cleanups must be on the function_obstack. */
3987 push_obstacks_nochange ();
3988 resume_temporary_allocation ();
3989 cleanup = unsave_expr (cleanup);
3990 pop_obstacks ();
3992 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
3994 if (! cond_context)
3995 /* If this block has a cleanup, it belongs in stack_block_stack. */
3996 stack_block_stack = thisblock;
3998 if (cond_context)
4000 start_sequence ();
4003 /* If this was optimized so that there is no exception region for the
4004 cleanup, then mark the TREE_LIST node, so that we can later tell
4005 if we need to call expand_eh_region_end. */
4006 if (! using_eh_for_cleanups_p
4007 || expand_eh_region_start_tree (decl, cleanup))
4008 TREE_ADDRESSABLE (t) = 1;
4009 /* If that started a new EH region, we're in a new block. */
4010 thisblock = block_stack;
4012 if (cond_context)
4014 seq = get_insns ();
4015 end_sequence ();
4016 if (seq)
4017 thisblock->data.block.last_unconditional_cleanup
4018 = emit_insns_after (seq,
4019 thisblock->data.block.last_unconditional_cleanup);
4021 else
4023 thisblock->data.block.last_unconditional_cleanup
4024 = get_last_insn ();
4025 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4028 return 1;
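/* Illustrative usage (added commentary, not original source): a C++
   front end registers a destructor call for a local object roughly as

       expand_decl (obj);
       expand_decl_init (obj);
       if (! expand_decl_cleanup (obj, build_destructor_call (obj)))
         error ("object with destructor outside any binding contour");

   where build_destructor_call is a hypothetical front-end helper that
   builds the cleanup tree; the zero return only happens when there is
   no enclosing binding contour at all.  */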
4031 /* Like expand_decl_cleanup, but suppress generating an exception handler
4032 to perform the cleanup. */
4035 expand_decl_cleanup_no_eh (decl, cleanup)
4036 tree decl, cleanup;
4038 int save_eh = using_eh_for_cleanups_p;
4039 int result;
4041 using_eh_for_cleanups_p = 0;
4042 result = expand_decl_cleanup (decl, cleanup);
4043 using_eh_for_cleanups_p = save_eh;
4045 return result;
4048 /* Arrange for the top element of the dynamic cleanup chain to be
4049 popped if we exit the current binding contour. DECL is the
4050 associated declaration, if any, otherwise NULL_TREE. If the
4051 current contour is left via an exception, then __sjthrow will pop
4052 the top element off the dynamic cleanup chain. The code that
4053 avoids doing the action we push into the cleanup chain in the
4054 exceptional case is contained in expand_cleanups.
4056 This routine is only used by expand_eh_region_start, and that is
4057 the only way in which an exception region should be started. This
4058 routine is only used when using the setjmp/longjmp codegen method
4059 for exception handling. */
4062 expand_dcc_cleanup (decl)
4063 tree decl;
4065 struct nesting *thisblock = block_stack;
4066 tree cleanup;
4068 /* Error if we are not in any block. */
4069 if (thisblock == 0)
4070 return 0;
4072 /* Record the cleanup for the dynamic handler chain. */
4074 /* All cleanups must be on the function_obstack. */
4075 push_obstacks_nochange ();
4076 resume_temporary_allocation ();
4077 cleanup = make_node (POPDCC_EXPR);
4078 pop_obstacks ();
4080 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4081 thisblock->data.block.cleanups
4082 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4084 /* If this block has a cleanup, it belongs in stack_block_stack. */
4085 stack_block_stack = thisblock;
4086 return 1;
4089 /* Arrange for the top element of the dynamic handler chain to be
4090 popped if we exit the current binding contour. DECL is the
4091 associated declaration, if any, otherwise NULL_TREE. If the current
4092 contour is left via an exception, then __sjthrow will pop the top
4093 element off the dynamic handler chain. The code that avoids doing
4094 the action we push into the handler chain in the exceptional case
4095 is contained in expand_cleanups.
4097 This routine is only used by expand_eh_region_start, and that is
4098 the only way in which an exception region should be started. This
4099 routine is only used when using the setjmp/longjmp codegen method
4100 for exception handling. */
4103 expand_dhc_cleanup (decl)
4104 tree decl;
4106 struct nesting *thisblock = block_stack;
4107 tree cleanup;
4109 /* Error if we are not in any block. */
4110 if (thisblock == 0)
4111 return 0;
4113 /* Record the cleanup for the dynamic handler chain. */
4115 /* All cleanups must be on the function_obstack. */
4116 push_obstacks_nochange ();
4117 resume_temporary_allocation ();
4118 cleanup = make_node (POPDHC_EXPR);
4119 pop_obstacks ();
4121 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4122 thisblock->data.block.cleanups
4123 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4125 /* If this block has a cleanup, it belongs in stack_block_stack. */
4126 stack_block_stack = thisblock;
4127 return 1;
4130 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4131 DECL_ELTS is the list of elements that belong to DECL's type.
4132 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4134 void
4135 expand_anon_union_decl (decl, cleanup, decl_elts)
4136 tree decl, cleanup, decl_elts;
4138 struct nesting *thisblock = block_stack;
4139 rtx x;
4141 expand_decl (decl);
4142 expand_decl_cleanup (decl, cleanup);
4143 x = DECL_RTL (decl);
4145 while (decl_elts)
4147 tree decl_elt = TREE_VALUE (decl_elts);
4148 tree cleanup_elt = TREE_PURPOSE (decl_elts);
4149 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4151 /* Propagate the union's alignment to the elements. */
4152 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4154 /* If the element has BLKmode and the union doesn't, the union is
4155 aligned such that the element doesn't need to have BLKmode, so
4156 change the element's mode to the appropriate one for its size. */
4157 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4158 DECL_MODE (decl_elt) = mode
4159 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
4160 MODE_INT, 1);
4162 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4163 instead create a new MEM rtx with the proper mode. */
4164 if (GET_CODE (x) == MEM)
4166 if (mode == GET_MODE (x))
4167 DECL_RTL (decl_elt) = x;
4168 else
4170 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
4171 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
4172 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
4175 else if (GET_CODE (x) == REG)
4177 if (mode == GET_MODE (x))
4178 DECL_RTL (decl_elt) = x;
4179 else
4180 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
4182 else
4183 abort ();
4185 /* Record the cleanup if there is one. */
4187 if (cleanup != 0)
4188 thisblock->data.block.cleanups
4189 = temp_tree_cons (decl_elt, cleanup_elt,
4190 thisblock->data.block.cleanups);
4192 decl_elts = TREE_CHAIN (decl_elts);
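/* Illustrative example (added commentary, not original source): for a
   C++ anonymous union

       union { int i; float f; };

   expand_anon_union_decl gives the union itself one DECL_RTL and then
   points every member at the same storage: `i' reuses that rtx
   directly if its mode matches, while `f' gets a new SFmode MEM at
   the same address, or a SUBREG when the union lives in a
   register.  */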
4196 /* Expand a list of cleanups LIST.
4197 Elements may be expressions or may be nested lists.
4199 If DONT_DO is nonnull, then any list-element
4200 whose TREE_PURPOSE matches DONT_DO is omitted.
4201 This is sometimes used to avoid a cleanup associated with
4202 a value that is being returned out of the scope.
4204 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4205 goto and handle protection regions specially in that case.
4207 If REACHABLE, we emit code, otherwise just inform the exception handling
4208 code about this finalization. */
4210 static void
4211 expand_cleanups (list, dont_do, in_fixup, reachable)
4212 tree list;
4213 tree dont_do;
4214 int in_fixup;
4215 int reachable;
4217 tree tail;
4218 for (tail = list; tail; tail = TREE_CHAIN (tail))
4219 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4221 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4222 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4223 else
4225 if (! in_fixup)
4227 tree cleanup = TREE_VALUE (tail);
4229 /* See expand_d{h,c}c_cleanup for why we avoid this. */
4230 if (TREE_CODE (cleanup) != POPDHC_EXPR
4231 && TREE_CODE (cleanup) != POPDCC_EXPR
4232 /* See expand_eh_region_start_tree for this case. */
4233 && ! TREE_ADDRESSABLE (tail))
4235 cleanup = protect_with_terminate (cleanup);
4236 expand_eh_region_end (cleanup);
4240 if (reachable)
4242 /* Cleanups may be run multiple times. For example,
4243 when exiting a binding contour, we expand the
4244 cleanups associated with that contour. When a goto
4245 within that binding contour has a target outside that
4246 contour, it will expand all cleanups from its scope to
4247 the target. Though the cleanups are expanded multiple
4248 times, the control paths are non-overlapping so the
4249 cleanups will not be executed twice. */
4251 /* We may need to protect fixups with rethrow regions. */
4252 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
4253 if (protect)
4254 expand_fixup_region_start ();
4255 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4256 if (protect)
4257 expand_fixup_region_end (TREE_VALUE (tail));
4258 free_temp_slots ();
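/* Illustrative scenario (added commentary, not original source) for
   the multiple-expansion comment above:

       {
         Obj s;           -- a C++ object registering a cleanup
         if (cond)
           goto out;      -- the fixup path expands the cleanup here
       }                  -- normal exit expands it here as well
       out:;

   The two expansions lie on disjoint control paths, so at run time
   the cleanup still executes exactly once.  */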
4264 /* Mark the context in which we are emitting RTL as a conditional
4265 context, so that any cleanup actions we register with
4266 expand_decl_init will be properly conditionalized when those
4267 cleanup actions are later performed. Must be called before any
4268 expression (tree) is expanded that is within a conditional context. */
4270 void
4271 start_cleanup_deferral ()
4273 /* block_stack can be NULL if we are inside the parameter list. It is
4274 OK to do nothing, because cleanups aren't possible here. */
4275 if (block_stack)
4276 ++block_stack->data.block.conditional_code;
4279 /* Mark the end of a conditional region of code. Because cleanup
4280 deferrals may be nested, we may still be in a conditional region
4281    after we end the currently deferred cleanups; only after we end all
4282    deferred cleanups are we back in unconditional code.  */
4284 void
4285 end_cleanup_deferral ()
4287 /* block_stack can be NULL if we are inside the parameter list. It is
4288 OK to do nothing, because cleanups aren't possible here. */
4289 if (block_stack)
4290 --block_stack->data.block.conditional_code;
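/* Illustrative pairing (added commentary, not original source): when
   expanding only one arm of a conditional, e.g. the second operand of
   `cond ? f () : 0', the expander brackets that subtree as

       start_cleanup_deferral ();
       ... expand_expr on the conditionally-executed subtree ...
       end_cleanup_deferral ();

   so any cleanup registered inside is seen by conditional_context and
   gets wrapped in the flag-testing COND_EXPR built in
   expand_decl_cleanup above.  */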
4293 /* Move all cleanups from the current block_stack
4294 to the containing block_stack, where they are assumed to
4295 have been created. If anything can cause a temporary to
4296 be created, but not expanded for more than one level of
4297 block_stacks, then this code will have to change. */
4299 void
4300 move_cleanups_up ()
4302 struct nesting *block = block_stack;
4303 struct nesting *outer = block->next;
4305 outer->data.block.cleanups
4306 = chainon (block->data.block.cleanups,
4307 outer->data.block.cleanups);
4308 block->data.block.cleanups = 0;
4311 tree
4312 last_cleanup_this_contour ()
4314 if (block_stack == 0)
4315 return 0;
4317 return block_stack->data.block.cleanups;
4320 /* Return 1 if there are any pending cleanups at this point.
4321 If THIS_CONTOUR is nonzero, check the current contour as well.
4322 Otherwise, look only at the contours that enclose this one. */
4325 any_pending_cleanups (this_contour)
4326 int this_contour;
4328 struct nesting *block;
4330 if (block_stack == 0)
4331 return 0;
4333 if (this_contour && block_stack->data.block.cleanups != NULL)
4334 return 1;
4335 if (block_stack->data.block.cleanups == 0
4336 && block_stack->data.block.outer_cleanups == 0)
4337 return 0;
4339 for (block = block_stack->next; block; block = block->next)
4340 if (block->data.block.cleanups != 0)
4341 return 1;
4343 return 0;
4346 /* Enter a case (Pascal) or switch (C) statement.
4347 Push a block onto case_stack and nesting_stack
4348 to accumulate the case-labels that are seen
4349 and to record the labels generated for the statement.
4351 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4352 Otherwise, this construct is transparent for `exit_something'.
4354 EXPR is the index-expression to be dispatched on.
4355 TYPE is its nominal type. We could simply convert EXPR to this type,
4356 but instead we take short cuts. */
4358 void
4359 expand_start_case (exit_flag, expr, type, printname)
4360 int exit_flag;
4361 tree expr;
4362 tree type;
4363 char *printname;
4365 register struct nesting *thiscase = ALLOC_NESTING ();
4367 /* Make an entry on case_stack for the case we are entering. */
4369 thiscase->next = case_stack;
4370 thiscase->all = nesting_stack;
4371 thiscase->depth = ++nesting_depth;
4372 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4373 thiscase->data.case_stmt.case_list = 0;
4374 thiscase->data.case_stmt.index_expr = expr;
4375 thiscase->data.case_stmt.nominal_type = type;
4376 thiscase->data.case_stmt.default_label = 0;
4377 thiscase->data.case_stmt.num_ranges = 0;
4378 thiscase->data.case_stmt.printname = printname;
4379 thiscase->data.case_stmt.seenlabel = 0;
4380 case_stack = thiscase;
4381 nesting_stack = thiscase;
4383 if (output_bytecode)
4385 bc_expand_start_case (thiscase, expr, type, printname);
4386 return;
4389 do_pending_stack_adjust ();
4391 /* Make sure case_stmt.start points to something that won't
4392 need any transformation before expand_end_case. */
4393 if (GET_CODE (get_last_insn ()) != NOTE)
4394 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4396 thiscase->data.case_stmt.start = get_last_insn ();
4398 start_cleanup_deferral ();
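/* Illustrative call sequence (added commentary, not original source):
   a C switch statement

       switch (x) { case 1: ...; default: ...; }

   flows through this interface roughly as

       expand_start_case (1, x_tree, type, "switch statement");
       pushcase (one_node, converter, label1, &dup);
       pushcase (NULL_TREE, converter, deflabel, &dup);   -- default
       ... expand the body ...
       expand_end_case (x_tree);

   where converter is the front end's conversion function, and
   expand_end_case later chooses between a decision tree and a jump
   table.  */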
4402 /* Enter a case statement. It is assumed that the caller has pushed
4403 the current context onto the case stack. */
4405 static void
4406 bc_expand_start_case (thiscase, expr, type, printname)
4407 struct nesting *thiscase;
4408 tree expr;
4409 tree type;
4410 char *printname;
4412 bc_expand_expr (expr);
4413 bc_expand_conversion (TREE_TYPE (expr), type);
4415 /* For cases, the skip is a place we jump to that's emitted after
4416 the size of the jump table is known. */
4418 thiscase->data.case_stmt.skip_label = gen_label_rtx ();
4419 bc_emit_bytecode (jump);
4420 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
4422 #ifdef DEBUG_PRINT_CODE
4423 fputc ('\n', stderr);
4424 #endif
4428 /* Start a "dummy case statement" within which case labels are invalid
4429 and are not connected to any larger real case statement.
4430 This can be used if you don't want to let a case statement jump
4431 into the middle of certain kinds of constructs. */
4433 void
4434 expand_start_case_dummy ()
4436 register struct nesting *thiscase = ALLOC_NESTING ();
4438 /* Make an entry on case_stack for the dummy. */
4440 thiscase->next = case_stack;
4441 thiscase->all = nesting_stack;
4442 thiscase->depth = ++nesting_depth;
4443 thiscase->exit_label = 0;
4444 thiscase->data.case_stmt.case_list = 0;
4445 thiscase->data.case_stmt.start = 0;
4446 thiscase->data.case_stmt.nominal_type = 0;
4447 thiscase->data.case_stmt.default_label = 0;
4448 thiscase->data.case_stmt.num_ranges = 0;
4449 case_stack = thiscase;
4450 nesting_stack = thiscase;
4451 start_cleanup_deferral ();
4454 /* End a dummy case statement. */
4456 void
4457 expand_end_case_dummy ()
4459 end_cleanup_deferral ();
4460 POPSTACK (case_stack);
4463 /* Return the data type of the index-expression
4464 of the innermost case statement, or null if none. */
4466 tree
4467 case_index_expr_type ()
4469 if (case_stack)
4470 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4471 return 0;
4474 /* Accumulate one case or default label inside a case or switch statement.
4475 VALUE is the value of the case (a null pointer, for a default label).
4476 The function CONVERTER, when applied to arguments T and V,
4477 converts the value V to the type T.
4479 If not currently inside a case or switch statement, return 1 and do
4480 nothing. The caller will print a language-specific error message.
4481 If VALUE is a duplicate or overlaps, return 2 and do nothing
4482 except store the (first) duplicate node in *DUPLICATE.
4483 If VALUE is out of range, return 3 and do nothing.
4484 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4485 Return 0 on success.
4487 Extended to handle range statements. */
4490 pushcase (value, converter, label, duplicate)
4491 register tree value;
4492 tree (*converter) PROTO((tree, tree));
4493 register tree label;
4494 tree *duplicate;
4496 register struct case_node **l;
4497 register struct case_node *n;
4498 tree index_type;
4499 tree nominal_type;
4501 if (output_bytecode)
4502 return bc_pushcase (value, label);
4504 /* Fail if not inside a real case statement. */
4505 if (! (case_stack && case_stack->data.case_stmt.start))
4506 return 1;
4508 if (stack_block_stack
4509 && stack_block_stack->depth > case_stack->depth)
4510 return 5;
4512 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4513 nominal_type = case_stack->data.case_stmt.nominal_type;
4515 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4516 if (index_type == error_mark_node)
4517 return 0;
4519 /* Convert VALUE to the type in which the comparisons are nominally done. */
4520 if (value != 0)
4521 value = (*converter) (nominal_type, value);
4523 /* If this is the first label, warn if any insns have been emitted. */
4524 if (case_stack->data.case_stmt.seenlabel == 0)
4526 rtx insn;
4527 for (insn = case_stack->data.case_stmt.start;
4528 insn;
4529 insn = NEXT_INSN (insn))
4531 if (GET_CODE (insn) == CODE_LABEL)
4532 break;
4533 if (GET_CODE (insn) != NOTE
4534 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4536 warning ("unreachable code at beginning of %s",
4537 case_stack->data.case_stmt.printname);
4538 break;
4542 case_stack->data.case_stmt.seenlabel = 1;
4544 /* Fail if this value is out of range for the actual type of the index
4545 (which may be narrower than NOMINAL_TYPE). */
4546 if (value != 0 && ! int_fits_type_p (value, index_type))
4547 return 3;
4549 /* Fail if this is a duplicate or overlaps another entry. */
4550 if (value == 0)
4552 if (case_stack->data.case_stmt.default_label != 0)
4554 *duplicate = case_stack->data.case_stmt.default_label;
4555 return 2;
4557 case_stack->data.case_stmt.default_label = label;
4559 else
4560 return add_case_node (value, value, label, duplicate);
4562 expand_label (label);
4563 return 0;
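/* Illustrative handling of the return codes (added commentary, not
   original source; the messages are made up but match the meanings
   documented above):

       switch (pushcase (value, converter, label, &duplicate))
         {
         case 1:
           error ("case label not within a switch statement");
           break;
         case 2:
           error_with_decl (duplicate, "duplicate case value");
           break;
         case 3:
           error ("case value out of range");
           break;
         case 5:
           error ("case label within scope of cleanup or variable array");
           break;
         }

   which is roughly how a front end is expected to report them.  */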
4566 /* Like pushcase but this case applies to all values between VALUE1 and
4567 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4568 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4569 starts at VALUE1 and ends at the highest value of the index type.
4570 If both are NULL, this case applies to all values.
4572 The return value is the same as that of pushcase but there is one
4573 additional error code: 4 means the specified range was empty. */
4576 pushcase_range (value1, value2, converter, label, duplicate)
4577 register tree value1, value2;
4578 tree (*converter) PROTO((tree, tree));
4579 register tree label;
4580 tree *duplicate;
4582 register struct case_node **l;
4583 register struct case_node *n;
4584 tree index_type;
4585 tree nominal_type;
4587 /* Fail if not inside a real case statement. */
4588 if (! (case_stack && case_stack->data.case_stmt.start))
4589 return 1;
4591 if (stack_block_stack
4592 && stack_block_stack->depth > case_stack->depth)
4593 return 5;
4595 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4596 nominal_type = case_stack->data.case_stmt.nominal_type;
4598 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4599 if (index_type == error_mark_node)
4600 return 0;
4602 /* If this is the first label, warn if any insns have been emitted. */
4603 if (case_stack->data.case_stmt.seenlabel == 0)
4605 rtx insn;
4606 for (insn = case_stack->data.case_stmt.start;
4607 insn;
4608 insn = NEXT_INSN (insn))
4610 if (GET_CODE (insn) == CODE_LABEL)
4611 break;
4612 if (GET_CODE (insn) != NOTE
4613 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4615 warning ("unreachable code at beginning of %s",
4616 case_stack->data.case_stmt.printname);
4617 break;
4621 case_stack->data.case_stmt.seenlabel = 1;
4623 /* Convert VALUEs to type in which the comparisons are nominally done
4624 and replace any unspecified value with the corresponding bound. */
4625 if (value1 == 0)
4626 value1 = TYPE_MIN_VALUE (index_type);
4627 if (value2 == 0)
4628 value2 = TYPE_MAX_VALUE (index_type);
4630 /* Fail if the range is empty. Do this before any conversion since
4631 we want to allow out-of-range empty ranges. */
4632 if (value2 && tree_int_cst_lt (value2, value1))
4633 return 4;
4635 value1 = (*converter) (nominal_type, value1);
4637 /* If the max was unbounded, use the max of the nominal_type we are
4638 converting to. Do this after the < check above to suppress false
4639 positives. */
4640 if (!value2)
4641 value2 = TYPE_MAX_VALUE (nominal_type);
4642 value2 = (*converter) (nominal_type, value2);
4644 /* Fail if these values are out of range. */
4645 if (TREE_CONSTANT_OVERFLOW (value1)
4646 || ! int_fits_type_p (value1, index_type))
4647 return 3;
4649 if (TREE_CONSTANT_OVERFLOW (value2)
4650 || ! int_fits_type_p (value2, index_type))
4651 return 3;
4653 return add_case_node (value1, value2, label, duplicate);
4656 /* Do the actual insertion of a case label for pushcase and pushcase_range
4657 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4658 slowdown for large switch statements. */
4660 static int
4661 add_case_node (low, high, label, duplicate)
4662 tree low, high;
4663 tree label;
4664 tree *duplicate;
4666 struct case_node *p, **q, *r;
4668 q = &case_stack->data.case_stmt.case_list;
4669 p = *q;
4671 while (r = *q)
4673 p = r;
4675 /* Keep going past elements distinctly greater than HIGH. */
4676 if (tree_int_cst_lt (high, p->low))
4677 q = &p->left;
4679 /* or distinctly less than LOW. */
4680 else if (tree_int_cst_lt (p->high, low))
4681 q = &p->right;
4683 else
4685 /* We have an overlap; this is an error. */
4686 *duplicate = p->code_label;
4687 return 2;
4691 /* Add this label to the chain, and succeed.
4692 Copy LOW, HIGH so they are on temporary rather than momentary
4693 obstack and will thus survive till the end of the case statement. */
4695 r = (struct case_node *) oballoc (sizeof (struct case_node));
4696 r->low = copy_node (low);
4698 /* If the bounds are equal, turn this into the one-value case. */
4700 if (tree_int_cst_equal (low, high))
4701 r->high = r->low;
4702 else
4704 r->high = copy_node (high);
4705 case_stack->data.case_stmt.num_ranges++;
4708 r->code_label = label;
4709 expand_label (label);
4711 *q = r;
4712 r->parent = p;
4713 r->left = 0;
4714 r->right = 0;
4715 r->balance = 0;
4717 while (p)
4719 struct case_node *s;
4721 if (r == p->left)
4723 int b;
4725 if (! (b = p->balance))
4726 /* Growth propagation from left side. */
4727 p->balance = -1;
4728 else if (b < 0)
4730 if (r->balance < 0)
4732 /* R-Rotation */
4733 if (p->left = s = r->right)
4734 s->parent = p;
4736 r->right = p;
4737 p->balance = 0;
4738 r->balance = 0;
4739 s = p->parent;
4740 p->parent = r;
4742 if (r->parent = s)
4744 if (s->left == p)
4745 s->left = r;
4746 else
4747 s->right = r;
4749 else
4750 case_stack->data.case_stmt.case_list = r;
4752 else
4753 /* r->balance == +1 */
4755 /* LR-Rotation */
4757 int b2;
4758 struct case_node *t = r->right;
4760 if (p->left = s = t->right)
4761 s->parent = p;
4763 t->right = p;
4764 if (r->right = s = t->left)
4765 s->parent = r;
4767 t->left = r;
4768 b = t->balance;
4769 b2 = b < 0;
4770 p->balance = b2;
4771 b2 = -b2 - b;
4772 r->balance = b2;
4773 t->balance = 0;
4774 s = p->parent;
4775 p->parent = t;
4776 r->parent = t;
4778 if (t->parent = s)
4780 if (s->left == p)
4781 s->left = t;
4782 else
4783 s->right = t;
4785 else
4786 case_stack->data.case_stmt.case_list = t;
4788 break;
4791 else
4793 /* p->balance == +1; growth of left side balances the node. */
4794 p->balance = 0;
4795 break;
4798 else
4799 /* r == p->right */
4801 int b;
4803 if (! (b = p->balance))
4804 /* Growth propagation from right side. */
4805 p->balance++;
4806 else if (b > 0)
4808 if (r->balance > 0)
4810 /* L-Rotation */
4812 if (p->right = s = r->left)
4813 s->parent = p;
4815 r->left = p;
4816 p->balance = 0;
4817 r->balance = 0;
4818 s = p->parent;
4819 p->parent = r;
4820 if (r->parent = s)
4822 if (s->left == p)
4823 s->left = r;
4824 else
4825 s->right = r;
4828 else
4829 case_stack->data.case_stmt.case_list = r;
4832 else
4833 /* r->balance == -1 */
4835 /* RL-Rotation */
4836 int b2;
4837 struct case_node *t = r->left;
4839 if (p->right = s = t->left)
4840 s->parent = p;
4842 t->left = p;
4844 if (r->left = s = t->right)
4845 s->parent = r;
4847 t->right = r;
4848 b = t->balance;
4849 b2 = b < 0;
4850 r->balance = b2;
4851 b2 = -b2 - b;
4852 p->balance = b2;
4853 t->balance = 0;
4854 s = p->parent;
4855 p->parent = t;
4856 r->parent = t;
4858 if (t->parent = s)
4860 if (s->left == p)
4861 s->left = t;
4862 else
4863 s->right = t;
4866 else
4867 case_stack->data.case_stmt.case_list = t;
4869 break;
4871 else
4873 /* p->balance == -1; growth of right side balances the node. */
4874 p->balance = 0;
4875 break;
4879 r = p;
4880 p = p->parent;
4883 return 0;
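/* Illustrative rebalancing example (added commentary, not original
   source): inserting case values 1, 2, 3 in that order grows the
   right side twice.  The first insertion leaves the root with balance
   +1; the second right-side growth would over-weight it, so the
   L-rotation above transforms

         1                       2
          \                     / \
           2         ==>       1   3
            \
             3

   restoring balance factors of 0 throughout and keeping lookups in
   the case list O(log N).  */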
4886 /* Accumulate one case or default label; VALUE is the value of the
4887 case, or nil for a default label. If not currently inside a case,
4888 return 1 and do nothing. If VALUE is a duplicate or overlaps, return
4889 2 and do nothing. If VALUE is out of range, return 3 and do nothing.
4890 Return 0 on success. This function is a leftover from the earlier
4891 bytecode compiler, which was based on gcc 1.37. It should be
4892 merged into pushcase. */
4894 static int
4895 bc_pushcase (value, label)
4896 tree value;
4897 tree label;
4899 struct nesting *thiscase = case_stack;
4900 struct case_node *case_label, *new_label;
4902 if (! thiscase)
4903 return 1;
4905 /* Fail if duplicate, overlap, or out of type range. */
4906 if (value)
4908 value = convert (thiscase->data.case_stmt.nominal_type, value);
4909 if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
4910 return 3;
4912 for (case_label = thiscase->data.case_stmt.case_list;
4913 case_label->left; case_label = case_label->left)
4914 if (! tree_int_cst_lt (case_label->left->high, value))
4915 break;
4917 if (case_label != thiscase->data.case_stmt.case_list
4918 && ! tree_int_cst_lt (case_label->high, value)
4919 || (case_label->left && ! tree_int_cst_lt (value, case_label->left->low)))
4920 return 2;
4922 new_label = (struct case_node *) oballoc (sizeof (struct case_node));
4923 new_label->low = new_label->high = copy_node (value);
4924 new_label->code_label = label;
4925 new_label->left = case_label->left;
4927 case_label->left = new_label;
4928 thiscase->data.case_stmt.num_ranges++;
4930 else
4932 if (thiscase->data.case_stmt.default_label)
4933 return 2;
4934 thiscase->data.case_stmt.default_label = label;
4937 expand_label (label);
4938 return 0;
4941 /* Returns the number of possible values of TYPE.
4942 Returns -1 if the number is unknown or variable.
4943 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4944 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4945 do not increase monotonically (there may be duplicates);
4946 to 1 if the values increase monotonically, but not always by 1;
4947 otherwise sets it to 0. */
4949 HOST_WIDE_INT
4950 all_cases_count (type, spareness)
4951 tree type;
4952 int *spareness;
4954 HOST_WIDE_INT count, count_high = 0;
4955 *spareness = 0;
4957 switch (TREE_CODE (type))
4959 tree t;
4960 case BOOLEAN_TYPE:
4961 count = 2;
4962 break;
4963 case CHAR_TYPE:
4964 count = 1 << BITS_PER_UNIT;
4965 break;
4966 default:
4967 case INTEGER_TYPE:
4968 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4969 || TYPE_MAX_VALUE (type) == NULL
4970 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4971 return -1;
4972 else
4974 /* count
4975 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4976 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4977 but with overflow checking. */
4978 tree mint = TYPE_MIN_VALUE (type);
4979 tree maxt = TYPE_MAX_VALUE (type);
4980 HOST_WIDE_INT lo, hi;
4981 neg_double(TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4982 &lo, &hi);
4983 add_double(TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4984 lo, hi, &lo, &hi);
4985 add_double (lo, hi, 1, 0, &lo, &hi);
4986 if (hi != 0 || lo < 0)
4987 return -2;
4988 count = lo;
4990 break;
4991 case ENUMERAL_TYPE:
4992 count = 0;
4993 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4995 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4996 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4997 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4998 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4999 *spareness = 1;
5000 count++;
5002 if (*spareness == 1)
5004 tree prev = TREE_VALUE (TYPE_VALUES (type));
5005 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
5007 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
5009 *spareness = 2;
5010 break;
5012 prev = TREE_VALUE (t);
5017 return count;
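/* Worked example (added commentary, not original source): for a
   signed 8-bit INTEGER_TYPE the overflow-checked computation above is

       -min            = -(-128) = 128
       128 + max       = 128 + 127 = 255
       255 + 1         = 256        -- count, with hi == 0 and lo >= 0

   carried out in two-word arithmetic via neg_double/add_double, so
   that a full-width type (say a 32-bit int on a host with 32-bit
   HOST_WIDE_INT) pushes a carry into hi and is reported as -2.  */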
5021 #define BITARRAY_TEST(ARRAY, INDEX) \
5022 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5023 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
5024 #define BITARRAY_SET(ARRAY, INDEX) \
5025 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5026 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
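/* Illustrative expansion (added commentary, not original source):
   with 8-bit chars, BITARRAY_SET (cases_seen, 11) is

       cases_seen[11 / 8] |= 1 << (11 % 8);    -- byte 1, bit 3

   so the bitstring packs one flag per possible case value.  */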
5028 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT)
5029    according to the case values we have seen, assuming the case expression
5030 has the given TYPE.
5031 SPARSENESS is as determined by all_cases_count.
5033 The time needed is proportional to COUNT, unless
5034 SPARSENESS is 2, in which case quadratic time is needed. */
5036 void
5037 mark_seen_cases (type, cases_seen, count, sparseness)
5038 tree type;
5039 unsigned char *cases_seen;
5040 long count;
5041 int sparseness;
5043 long i;
5045 tree next_node_to_try = NULL_TREE;
5046 long next_node_offset = 0;
5048 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
5049 tree val = make_node (INTEGER_CST);
5050 TREE_TYPE (val) = type;
5051 if (! root)
5052 ; /* Do nothing */
5053 else if (sparseness == 2)
5055 tree t;
5056 HOST_WIDE_INT xlo;
5058 /* This less efficient loop is only needed to handle
5059 duplicate case values (multiple enum constants
5060 with the same value). */
5061 TREE_TYPE (val) = TREE_TYPE (root->low);
5062 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5063 t = TREE_CHAIN (t), xlo++)
5065 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
5066 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
5067 n = root;
5070 /* Keep going past elements distinctly greater than VAL. */
5071 if (tree_int_cst_lt (val, n->low))
5072 n = n->left;
5074 /* or distinctly less than VAL. */
5075 else if (tree_int_cst_lt (n->high, val))
5076 n = n->right;
5078 else
5080 /* We have found a matching range. */
5081 BITARRAY_SET (cases_seen, xlo);
5082 break;
5085 while (n);
5088 else
5090 if (root->left)
5091 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5092 for (n = root; n; n = n->right)
5094 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5095 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5096 while ( ! tree_int_cst_lt (n->high, val))
5098 /* Calculate (into xlo) the "offset" of the integer (val).
5099 The element with lowest value has offset 0, the next smallest
5100 element has offset 1, etc. */
5102 HOST_WIDE_INT xlo, xhi;
5103 tree t;
5104 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5106 /* The TYPE_VALUES will be in increasing order, so
5107    start searching where we last ended.  */
5108 t = next_node_to_try;
5109 xlo = next_node_offset;
5110 xhi = 0;
5111 for (;;)
5113 if (t == NULL_TREE)
5115 t = TYPE_VALUES (type);
5116 xlo = 0;
5118 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5120 next_node_to_try = TREE_CHAIN (t);
5121 next_node_offset = xlo + 1;
5122 break;
5124 xlo++;
5125 t = TREE_CHAIN (t);
5126 if (t == next_node_to_try)
5128 xlo = -1;
5129 break;
5133 else
5135 t = TYPE_MIN_VALUE (type);
5136 if (t)
5137 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5138 &xlo, &xhi);
5139 else
5140 xlo = xhi = 0;
5141 add_double (xlo, xhi,
5142 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5143 &xlo, &xhi);
5146 if (xhi == 0 && xlo >= 0 && xlo < count)
5147 BITARRAY_SET (cases_seen, xlo);
5148 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5149 1, 0,
5150 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5156 /* Called when the index of a switch statement is an enumerated type
5157 and there is no default label.
5159 Checks that all enumeration literals are covered by the case
5160 expressions of a switch. Also, warn if there are any extra
5161 switch cases that are *not* elements of the enumerated type.
5163 If all enumeration literals were covered by the case expressions,
5164 turn one of the expressions into the default expression since it should
5165 not be possible to fall through such a switch. */
5167 void
5168 check_for_full_enumeration_handling (type)
5169 tree type;
5171 register struct case_node *n;
5172 register struct case_node **l;
5173 register tree chain;
5174 int all_values = 1;
5176 /* True iff the selector type is a numbered set mode. */
5177 int sparseness = 0;
5179 /* The number of possible selector values. */
5180 HOST_WIDE_INT size;
5182 /* For each possible selector value, a one iff it has been matched
5183 by a case value alternative. */
5184 unsigned char *cases_seen;
5186 /* The allocated size of cases_seen, in chars. */
5187 long bytes_needed;
5188 tree t;
5190 if (output_bytecode)
5192 bc_check_for_full_enumeration_handling (type);
5193 return;
5196 if (! warn_switch)
5197 return;
5199 size = all_cases_count (type, &sparseness);
5200 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5202 if (size > 0 && size < 600000
5203 /* We deliberately use malloc here - not xmalloc. */
5204 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
5206 long i;
5207 tree v = TYPE_VALUES (type);
5208 bzero (cases_seen, bytes_needed);
5210 /* The time complexity of this code is normally O(N), where
5211    N is the number of members in the enumerated type.
5212    However, if type is an ENUMERAL_TYPE whose values do not
5213 increase monotonically, O(N*log(N)) time may be needed. */
5215 mark_seen_cases (type, cases_seen, size, sparseness);
5217 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5219 if (BITARRAY_TEST(cases_seen, i) == 0)
5220 warning ("enumeration value `%s' not handled in switch",
5221 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5224 free (cases_seen);
5227 /* Now we go the other way around; we warn if there are case
5228 expressions that don't correspond to enumerators. This can
5229 occur since C and C++ don't enforce type-checking of
5230 assignments to enumeration variables. */
5232 if (case_stack->data.case_stmt.case_list
5233 && case_stack->data.case_stmt.case_list->left)
5234 case_stack->data.case_stmt.case_list
5235 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5236 if (warn_switch)
5237 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5239 for (chain = TYPE_VALUES (type);
5240 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5241 chain = TREE_CHAIN (chain))
5244 if (!chain)
5246 if (TYPE_NAME (type) == 0)
5247 warning ("case value `%d' not in enumerated type",
5248 TREE_INT_CST_LOW (n->low));
5249 else
5250 warning ("case value `%d' not in enumerated type `%s'",
5251 TREE_INT_CST_LOW (n->low),
5252 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5253 == IDENTIFIER_NODE)
5254 ? TYPE_NAME (type)
5255 : DECL_NAME (TYPE_NAME (type))));
5257 if (!tree_int_cst_equal (n->low, n->high))
5259 for (chain = TYPE_VALUES (type);
5260 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5261 chain = TREE_CHAIN (chain))
5264 if (!chain)
5266 if (TYPE_NAME (type) == 0)
5267 warning ("case value `%d' not in enumerated type",
5268 TREE_INT_CST_LOW (n->high));
5269 else
5270 warning ("case value `%d' not in enumerated type `%s'",
5271 TREE_INT_CST_LOW (n->high),
5272 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5273 == IDENTIFIER_NODE)
5274 ? TYPE_NAME (type)
5275 : DECL_NAME (TYPE_NAME (type))));
5280 #if 0
5281 /* ??? This optimization is disabled because it causes valid programs to
5282 fail. ANSI C does not guarantee that an expression with enum type
5283 will have a value that is the same as one of the enumeration literals. */
5285 /* If all values were found as case labels, make one of them the default
5286 label. Thus, this switch will never fall through. We arbitrarily pick
5287 the last one to make the default since this is likely the most
5288 efficient choice. */
5290 if (all_values)
5292 for (l = &case_stack->data.case_stmt.case_list;
5293 (*l)->right != 0;
5294 l = &(*l)->right)
5297 case_stack->data.case_stmt.default_label = (*l)->code_label;
5298 *l = 0;
5300 #endif /* 0 */
5304 /* Check that all enumeration literals are covered by the case
5305 expressions of a switch. Also warn if there are any cases
5306 that are not elements of the enumerated type. */
5308 static void
5309 bc_check_for_full_enumeration_handling (type)
5310 tree type;
5312 struct nesting *thiscase = case_stack;
5313 struct case_node *c;
5314 tree e;
5316 /* Check for enums not handled. */
5317 for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
5319 for (c = thiscase->data.case_stmt.case_list->left;
5320 c && tree_int_cst_lt (c->high, TREE_VALUE (e));
5321 c = c->left)
5323 if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
5324 warning ("enumerated value `%s' not handled in switch",
5325 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
5328 /* Check for cases not in the enumeration. */
5329 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5331 for (e = TYPE_VALUES (type);
5332 e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
5333 e = TREE_CHAIN (e))
5335 if (! e)
5336 warning ("case value `%d' not in enumerated type `%s'",
5337 TREE_INT_CST_LOW (c->low),
5338 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
5339 ? TYPE_NAME (type)
5340 : DECL_NAME (TYPE_NAME (type))));
5344 /* Terminate a case (Pascal) or switch (C) statement
5345 in which ORIG_INDEX is the expression to be tested.
5346 Generate the code to test it and jump to the right place. */
5348 void
5349 expand_end_case (orig_index)
5350 tree orig_index;
5352 tree minval, maxval, range, orig_minval;
5353 rtx default_label = 0;
5354 register struct case_node *n;
5355 int count;
5356 rtx index;
5357 rtx table_label;
5358 int ncases;
5359 rtx *labelvec;
5360 register int i;
5361 rtx before_case;
5362 register struct nesting *thiscase = case_stack;
5363 tree index_expr, index_type;
5364 int unsignedp;
5366 if (output_bytecode)
5368 bc_expand_end_case (orig_index);
5369 return;
5372 table_label = gen_label_rtx ();
5373 index_expr = thiscase->data.case_stmt.index_expr;
5374 index_type = TREE_TYPE (index_expr);
5375 unsignedp = TREE_UNSIGNED (index_type);
5377 do_pending_stack_adjust ();
5379 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5380 if (index_type != error_mark_node)
5382 /* If switch expression was an enumerated type, check that all
5383 enumeration literals are covered by the cases.
5384 No sense trying this if there's a default case, however. */
5386 if (!thiscase->data.case_stmt.default_label
5387 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5388 && TREE_CODE (index_expr) != INTEGER_CST)
5389 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5391 /* If this is the first label, warn if any insns have been emitted. */
5392 if (thiscase->data.case_stmt.seenlabel == 0)
5394 rtx insn;
5395 for (insn = get_last_insn ();
5396 insn != case_stack->data.case_stmt.start;
5397 insn = PREV_INSN (insn))
5398 if (GET_CODE (insn) != NOTE
5399 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
5401 warning ("unreachable code at beginning of %s",
5402 case_stack->data.case_stmt.printname);
5403 break;
5407 /* If we don't have a default-label, create one here,
5408 after the body of the switch. */
5409 if (thiscase->data.case_stmt.default_label == 0)
5411 thiscase->data.case_stmt.default_label
5412 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5413 expand_label (thiscase->data.case_stmt.default_label);
5415 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5417 before_case = get_last_insn ();
5419 if (thiscase->data.case_stmt.case_list
5420 && thiscase->data.case_stmt.case_list->left)
5421 thiscase->data.case_stmt.case_list
5422 = case_tree2list(thiscase->data.case_stmt.case_list, 0);
5424 /* Simplify the case-list before we count it. */
5425 group_case_nodes (thiscase->data.case_stmt.case_list);
5427 /* Get upper and lower bounds of case values.
5428 Also convert all the case values to the index expr's data type. */
5430 count = 0;
5431 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5433 /* Check low and high label values are integers. */
5434 if (TREE_CODE (n->low) != INTEGER_CST)
5435 abort ();
5436 if (TREE_CODE (n->high) != INTEGER_CST)
5437 abort ();
5439 n->low = convert (index_type, n->low);
5440 n->high = convert (index_type, n->high);
5442 /* Count the elements and track the largest and smallest
5443 of them (treating them as signed even if they are not). */
5444 if (count++ == 0)
5446 minval = n->low;
5447 maxval = n->high;
5449 else
5451 if (INT_CST_LT (n->low, minval))
5452 minval = n->low;
5453 if (INT_CST_LT (maxval, n->high))
5454 maxval = n->high;
5456 /* A range counts double, since it requires two compares. */
5457 if (! tree_int_cst_equal (n->low, n->high))
5458 count++;
5461 orig_minval = minval;
5463 /* Compute span of values. */
5464 if (count != 0)
5465 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5467 end_cleanup_deferral ();
5469 if (count == 0)
5471 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5472 emit_queue ();
5473 emit_jump (default_label);
5476 /* If range of values is much bigger than number of values,
5477 make a sequence of conditional branches instead of a dispatch.
5478 If the switch-index is a constant, do it this way
5479 because we can optimize it. */
5481 #ifndef CASE_VALUES_THRESHOLD
5482 #ifdef HAVE_casesi
5483 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5484 #else
5485 /* If machine does not have a case insn that compares the
5486 bounds, this means extra overhead for dispatch tables
5487 which raises the threshold for using them. */
5488 #define CASE_VALUES_THRESHOLD 5
5489 #endif /* HAVE_casesi */
5490 #endif /* CASE_VALUES_THRESHOLD */
5492 else if (TREE_INT_CST_HIGH (range) != 0
5493 || count < CASE_VALUES_THRESHOLD
5494 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
5495 > 10 * count)
5496 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5497 || flag_pic
5498 #endif
5499 || TREE_CODE (index_expr) == INTEGER_CST
5500 /* These will reduce to a constant. */
5501 || (TREE_CODE (index_expr) == CALL_EXPR
5502 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5503 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5504 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5505 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5506 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5508 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5510    /* If the index is a short or char for which we do not have
5511       an insn to handle comparisons directly, convert it to
5512 a full integer now, rather than letting each comparison
5513 generate the conversion. */
5515 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5516 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
5517 == CODE_FOR_nothing))
5519 enum machine_mode wider_mode;
5520 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5521 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5522 if (cmp_optab->handlers[(int) wider_mode].insn_code
5523 != CODE_FOR_nothing)
5525 index = convert_to_mode (wider_mode, index, unsignedp);
5526 break;
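/* For example, on a machine with no HImode compare insn, a `short'
   index is widened to SImode once here, instead of widening it
   before every comparison in the decision tree emitted below.  */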
5530 emit_queue ();
5531 do_pending_stack_adjust ();
5533 index = protect_from_queue (index, 0);
5534 if (GET_CODE (index) == MEM)
5535 index = copy_to_reg (index);
5536 if (GET_CODE (index) == CONST_INT
5537 || TREE_CODE (index_expr) == INTEGER_CST)
5539 /* Make a tree node with the proper constant value
5540 if we don't already have one. */
5541 if (TREE_CODE (index_expr) != INTEGER_CST)
5543 index_expr
5544 = build_int_2 (INTVAL (index),
5545 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5546 index_expr = convert (index_type, index_expr);
5549 /* For constant index expressions we need only
5550 issue an unconditional branch to the appropriate
5551 target code. The job of removing any unreachable
5552 code is left to the optimization phase if the
5553 "-O" option is specified. */
5554 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5555 if (! tree_int_cst_lt (index_expr, n->low)
5556 && ! tree_int_cst_lt (n->high, index_expr))
5557 break;
5559 if (n)
5560 emit_jump (label_rtx (n->code_label));
5561 else
5562 emit_jump (default_label);
5564 else
5566 /* If the index expression is not constant we generate
5567 a binary decision tree to select the appropriate
5568 target code. This is done as follows:
5570 The list of cases is rearranged into a binary tree,
5571 nearly optimal assuming equal probability for each case.
5573 The tree is transformed into RTL, eliminating
5574 redundant test conditions at the same time.
5576 If program flow could reach the end of the
5577 decision tree, an unconditional jump to the
5578 default code is emitted. */
5580 use_cost_table
5581 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5582 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5583 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5584 NULL_PTR);
5585 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5586 default_label, index_type);
5587 emit_jump_if_reachable (default_label);
5590 else
5592 int win = 0;
5593 #ifdef HAVE_casesi
5594 if (HAVE_casesi)
5596 enum machine_mode index_mode = SImode;
5597 int index_bits = GET_MODE_BITSIZE (index_mode);
5598 rtx op1, op2;
5599 enum machine_mode op_mode;
5601 /* Convert the index to SImode. */
5602 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5603 > GET_MODE_BITSIZE (index_mode))
5605 enum machine_mode omode = TYPE_MODE (index_type);
5606 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5608 /* We must handle the endpoints in the original mode. */
5609 index_expr = build (MINUS_EXPR, index_type,
5610 index_expr, minval);
5611 minval = integer_zero_node;
5612 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5613 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
5614 emit_jump_insn (gen_bltu (default_label));
5615 /* Now we can safely truncate. */
5616 index = convert_to_mode (index_mode, index, 0);
5618 else
5620 if (TYPE_MODE (index_type) != index_mode)
5622 index_expr = convert (type_for_size (index_bits, 0),
5623 index_expr);
5624 index_type = TREE_TYPE (index_expr);
5627 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5629 emit_queue ();
5630 index = protect_from_queue (index, 0);
5631 do_pending_stack_adjust ();
5633 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
5634 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
5635 (index, op_mode))
5636 index = copy_to_mode_reg (op_mode, index);
5638 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5640 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
5641 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
5642 (op1, op_mode))
5643 op1 = copy_to_mode_reg (op_mode, op1);
5645 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5647 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
5648 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
5649 (op2, op_mode))
5650 op2 = copy_to_mode_reg (op_mode, op2);
5652 emit_jump_insn (gen_casesi (index, op1, op2,
5653 table_label, default_label));
5654 win = 1;
5656 #endif
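/* The casesi pattern does the bounds check itself: it compares
   INDEX minus OP1 (the minimum) against OP2 (the range), jumps
   through the table at TABLE_LABEL when in range, and otherwise
   jumps to DEFAULT_LABEL.  */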
5657 #ifdef HAVE_tablejump
5658 if (! win && HAVE_tablejump)
5660 index_expr = convert (thiscase->data.case_stmt.nominal_type,
5661 fold (build (MINUS_EXPR, index_type,
5662 index_expr, minval)));
5663 index_type = TREE_TYPE (index_expr);
5664 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5665 emit_queue ();
5666 index = protect_from_queue (index, 0);
5667 do_pending_stack_adjust ();
5669 do_tablejump (index, TYPE_MODE (index_type),
5670 expand_expr (range, NULL_RTX, VOIDmode, 0),
5671 table_label, default_label);
5672 win = 1;
5674 #endif
5675 if (! win)
5676 abort ();
5678 /* Get table of labels to jump to, in order of case index. */
5680 ncases = TREE_INT_CST_LOW (range) + 1;
5681 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5682 bzero ((char *) labelvec, ncases * sizeof (rtx));
5684 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5686 register HOST_WIDE_INT i
5687 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5689 while (1)
5691 labelvec[i]
5692 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
5693 if (i + TREE_INT_CST_LOW (orig_minval)
5694 == TREE_INT_CST_LOW (n->high))
5695 break;
5696 i++;
5700 /* Fill in the gaps with the default. */
5701 for (i = 0; i < ncases; i++)
5702 if (labelvec[i] == 0)
5703 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
5705 /* Output the table. */
5706 emit_label (table_label);
5708 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5709 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
5710 gen_rtx (LABEL_REF, Pmode, table_label),
5711 gen_rtvec_v (ncases, labelvec)));
5712 else
5713 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
5714 gen_rtvec_v (ncases, labelvec)));
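/* For example, for the cases 2 and 4..5 with minval 2, ncases is 4
   and the vector emitted is (L2, default, L4, L4): slot 0 stands for
   index value 2, slot 1 is the gap filled with the default label,
   and the range 4..5 occupies two consecutive slots.  */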
5716 /* If the case insn drops through the table,
5717 after the table we must jump to the default-label.
5718 Otherwise record no drop-through after the table. */
5719 #ifdef CASE_DROPS_THROUGH
5720 emit_jump (default_label);
5721 #else
5722 emit_barrier ();
5723 #endif
5726 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5727 reorder_insns (before_case, get_last_insn (),
5728 thiscase->data.case_stmt.start);
5730 else
5731 end_cleanup_deferral ();
5733 if (thiscase->exit_label)
5734 emit_label (thiscase->exit_label);
5736 POPSTACK (case_stack);
5738 free_temp_slots ();
5741 /* Convert the tree NODE into a list linked by the right field, with the left
5742 field zeroed. RIGHT is used for recursion; it is a list to be placed
5743 rightmost in the resulting list. */
5745 static struct case_node *
5746 case_tree2list (node, right)
5747 struct case_node *node, *right;
5748 {
5749 struct case_node *left;
5751 if (node->right)
5752 right = case_tree2list (node->right, right);
5754 node->right = right;
5755 if ((left = node->left) != 0)
5756 {
5757 node->left = 0;
5758 return case_tree2list (left, node);
5759 }
5761 return node;
5762 }
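/* An illustrative sketch (hypothetical shape, comment only): case
   labels inserted as the search tree

	     3
	    / \
	   1   5
	    \
	     2

   come back from case_tree2list (root, 0) as the ordered chain
   1 -> 2 -> 3 -> 5, linked through the `right' fields with every
   `left' field cleared, which is the shape the loops above expect.  */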
5764 /* Terminate a case statement. EXPR is the original index
5765 expression. */
5767 static void
5768 bc_expand_end_case (expr)
5769 tree expr;
5771 struct nesting *thiscase = case_stack;
5772 enum bytecode_opcode opcode;
5773 struct bc_label *jump_label;
5774 struct case_node *c;
5776 bc_emit_bytecode (jump);
5777 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
5779 #ifdef DEBUG_PRINT_CODE
5780 fputc ('\n', stderr);
5781 #endif
5783 /* Now that the size of the jump table is known, emit the actual
5784 indexed jump instruction. */
5785 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
5787 opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
5788 ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
5789 : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;
5791 bc_emit_bytecode (opcode);
5793 /* Now emit the case instruction's literal arguments, in order.
5794 In addition to the value on the stack, it uses:
5795 1. The address of the jump table.
5796 2. The size of the jump table.
5797 3. The default label. */
5799 jump_label = bc_get_bytecode_label ();
5800 bc_emit_bytecode_labelref (jump_label);
5801 bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
5802 sizeof thiscase->data.case_stmt.num_ranges);
5804 if (thiscase->data.case_stmt.default_label)
5805 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)));
5806 else
5807 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
5809 /* Output the jump table. */
5811 bc_align_bytecode (3 /* PTR_ALIGN */);
5812 bc_emit_bytecode_labeldef (jump_label);
5814 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
5815 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5817 opcode = TREE_INT_CST_LOW (c->low);
5818 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5820 opcode = TREE_INT_CST_LOW (c->high);
5821 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5823 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
5825 else
5826 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
5827 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5829 bc_emit_bytecode_DI_const (c->low);
5830 bc_emit_bytecode_DI_const (c->high);
5832 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
5834 else
5835 /* Bad mode. */
5836 abort ();
5839 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));
5841 /* Possibly issue enumeration warnings. */
5843 if (!thiscase->data.case_stmt.default_label
5844 && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
5845 && TREE_CODE (expr) != INTEGER_CST
5846 && warn_switch)
5847 check_for_full_enumeration_handling (TREE_TYPE (expr));
5850 #ifdef DEBUG_PRINT_CODE
5851 fputc ('\n', stderr);
5852 #endif
5854 POPSTACK (case_stack);
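/* Schematically, the stream emitted above for a two-case SImode
   switch has the shape

	jump	exit
   skip:
	caseSI
	ref	table
	const	num_ranges
	ref	default (or exit when there is no default label)
	align	PTR_ALIGN
   table:
	1, 1, ref L1
	3, 3, ref L3
   exit:

   where each table entry is a low bound, a high bound, and the label
   of the matching case.  */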
5858 /* Return a unique bytecode ID. */
5860 int
5861 bc_new_uid ()
5862 {
5863 static int bc_uid = 0;
5865 return (++bc_uid);
5866 }
5868 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5870 static void
5871 do_jump_if_equal (op1, op2, label, unsignedp)
5872 rtx op1, op2, label;
5873 int unsignedp;
5874 {
5875 if (GET_CODE (op1) == CONST_INT
5876 && GET_CODE (op2) == CONST_INT)
5877 {
5878 if (INTVAL (op1) == INTVAL (op2))
5879 emit_jump (label);
5880 }
5881 else
5882 {
5883 enum machine_mode mode = GET_MODE (op1);
5884 if (mode == VOIDmode)
5885 mode = GET_MODE (op2);
5886 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
5887 emit_jump_insn (gen_beq (label));
5888 }
5889 }
5891 /* Not all case values are encountered equally. This function
5892 uses a heuristic to weight case labels, in cases where that
5893 looks like a reasonable thing to do.
5895 Right now, all we try to guess is text, and we establish the
5896 following weights:
5898 chars above space: 16
5899 digits: 16
5900 default: 12
5901 space, punct: 8
5902 tab: 4
5903 newline: 2
5904 other "\" chars: 1
5905 remaining chars: 0
5907 If we find any cases in the switch that are not either -1 or in the range
5908 of valid ASCII characters, or are control characters other than those
5909 commonly used with "\", don't treat this switch as scanning text.
5911 Return 1 if these nodes are suitable for cost estimation, otherwise
5912 return 0. */
5914 static int
5915 estimate_case_costs (node)
5916 case_node_ptr node;
5918 tree min_ascii = build_int_2 (-1, -1);
5919 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5920 case_node_ptr n;
5921 int i;
5923 /* If we haven't already made the cost table, make it now. Note that the
5924 lower bound of the table is -1, not zero. */
5926 if (cost_table == NULL)
5928 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5929 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5931 for (i = 0; i < 128; i++)
5933 if (isalnum (i))
5934 cost_table[i] = 16;
5935 else if (ispunct (i))
5936 cost_table[i] = 8;
5937 else if (iscntrl (i))
5938 cost_table[i] = -1;
5941 cost_table[' '] = 8;
5942 cost_table['\t'] = 4;
5943 cost_table['\0'] = 4;
5944 cost_table['\n'] = 2;
5945 cost_table['\f'] = 1;
5946 cost_table['\v'] = 1;
5947 cost_table['\b'] = 1;
5950 /* See if all the case expressions look like text. It is text if the
5951 lowest constant is >= -1 and the highest constant is <= 127. Do all comparisons
5952 as signed arithmetic since we don't want to ever access cost_table with a
5953 value less than -1. Also check that none of the constants in a range
5954 are strange control characters. */
5956 for (n = node; n; n = n->right)
5958 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5959 return 0;
5961 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5962 if (cost_table[i] < 0)
5963 return 0;
5966 /* All interesting values are within the range of interesting
5967 ASCII characters. */
5968 return 1;
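/* For example, a switch over the cases 'a', '0' and '\n' looks like
   text, so the nodes get the weights 16, 16 and 2; balance_case_nodes
   below will then place the two alphanumeric cases nearer the root of
   the decision tree than the rare newline case.  */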
5971 /* Scan an ordered list of case nodes
5972 combining those with consecutive values or ranges.
5974 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
5976 static void
5977 group_case_nodes (head)
5978 case_node_ptr head;
5980 case_node_ptr node = head;
5982 while (node)
5984 rtx lb = next_real_insn (label_rtx (node->code_label));
5985 rtx lb2;
5986 case_node_ptr np = node;
5988 /* Try to group the successors of NODE with NODE. */
5989 while (((np = np->right) != 0)
5990 /* Do they jump to the same place? */
5991 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5992 || (lb != 0 && lb2 != 0
5993 && simplejump_p (lb)
5994 && simplejump_p (lb2)
5995 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5996 SET_SRC (PATTERN (lb2)))))
5997 /* Are their ranges consecutive? */
5998 && tree_int_cst_equal (np->low,
5999 fold (build (PLUS_EXPR,
6000 TREE_TYPE (node->high),
6001 node->high,
6002 integer_one_node)))
6003 /* An overflow is not consecutive. */
6004 && tree_int_cst_lt (node->high,
6005 fold (build (PLUS_EXPR,
6006 TREE_TYPE (node->high),
6007 node->high,
6008 integer_one_node))))
6010 node->high = np->high;
6012 /* NP is the first node after NODE which can't be grouped with it.
6013 Delete the nodes in between, and move on to that node. */
6014 node->right = np;
6015 node = np;
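/* For example, `case 1: case 2: case 3:' all falling through to the
   same code are merged into the single range 1..3, which the decision
   tree can then test with two comparisons instead of three.  */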
6019 /* Take an ordered list of case nodes
6020 and transform them into a near optimal binary tree,
6021 on the assumption that any target code selection value is as
6022 likely as any other.
6024 The transformation is performed by splitting the ordered
6025 list into two equal sections plus a pivot. The parts are
6026 then attached to the pivot as left and right branches. Each
6027 branch is then transformed recursively. */
6029 static void
6030 balance_case_nodes (head, parent)
6031 case_node_ptr *head;
6032 case_node_ptr parent;
6034 register case_node_ptr np;
6036 np = *head;
6037 if (np)
6039 int cost = 0;
6040 int i = 0;
6041 int ranges = 0;
6042 register case_node_ptr *npp;
6043 case_node_ptr left;
6045 /* Count the number of entries on branch. Also count the ranges. */
6047 while (np)
6049 if (!tree_int_cst_equal (np->low, np->high))
6051 ranges++;
6052 if (use_cost_table)
6053 cost += cost_table[TREE_INT_CST_LOW (np->high)];
6056 if (use_cost_table)
6057 cost += cost_table[TREE_INT_CST_LOW (np->low)];
6059 i++;
6060 np = np->right;
6063 if (i > 2)
6065 /* Split this list if it is long enough for that to help. */
6066 npp = head;
6067 left = *npp;
6068 if (use_cost_table)
6070 /* Find the place in the list that bisects the list's total cost;
6071 here I gets half the total cost. */
6072 int n_moved = 0;
6073 i = (cost + 1) / 2;
6074 while (1)
6076 /* Skip nodes while their cost does not reach that amount. */
6077 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
6078 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
6079 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
6080 if (i <= 0)
6081 break;
6082 npp = &(*npp)->right;
6083 n_moved += 1;
6085 if (n_moved == 0)
6087 /* Leave this branch lopsided, but optimize left-hand
6088 side and fill in `parent' fields for right-hand side. */
6089 np = *head;
6090 np->parent = parent;
6091 balance_case_nodes (&np->left, np);
6092 for (; np->right; np = np->right)
6093 np->right->parent = np;
6094 return;
6097 /* If there are just three nodes, split at the middle one. */
6098 else if (i == 3)
6099 npp = &(*npp)->right;
6100 else
6102 /* Find the place in the list that bisects the list's total cost,
6103 where ranges count as 2;
6104 here I gets half the total cost. */
6105 i = (i + ranges + 1) / 2;
6106 while (1)
6108 /* Skip nodes while their cost does not reach that amount. */
6109 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
6110 i--;
6111 i--;
6112 if (i <= 0)
6113 break;
6114 npp = &(*npp)->right;
6117 *head = np = *npp;
6118 *npp = 0;
6119 np->parent = parent;
6120 np->left = left;
6122 /* Optimize each of the two split parts. */
6123 balance_case_nodes (&np->left, np);
6124 balance_case_nodes (&np->right, np);
6126 else
6128 /* Else leave this branch as one level,
6129 but fill in `parent' fields. */
6130 np = *head;
6131 np->parent = parent;
6132 for (; np->right; np = np->right)
6133 np->right->parent = np;
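/* A self-contained sketch (hypothetical names, under #if 0 since it
   is illustration only) of the splitting rule used above when no
   cost table is in use: walk half of the weighted length into the
   list, counting a range twice, and split at that node.  */
#if 0
struct demo_node { int low, high; struct demo_node *right; };

static struct demo_node **
demo_find_split (head, count, ranges)
     struct demo_node **head;
     int count, ranges;
{
  int i = (count + ranges + 1) / 2;

  while (1)
    {
      if ((*head)->low != (*head)->high)
	i--;			/* A range counts double.  */
      i--;
      if (i <= 0)
	return head;		/* *head becomes the pivot.  */
      head = &(*head)->right;
    }
}
#endif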
6138 /* Search the parent sections of the case node tree
6139 to see if a test for the lower bound of NODE would be redundant.
6140 INDEX_TYPE is the type of the index expression.
6142 The instructions to generate the case decision tree are
6143 output in the same order as nodes are processed, so it is
6144 known that if a parent node checks the range of the current
6145 node minus one, the current node is bounded at its lower
6146 span. Thus the test would be redundant. */
6148 static int
6149 node_has_low_bound (node, index_type)
6150 case_node_ptr node;
6151 tree index_type;
6152 {
6153 tree low_minus_one;
6154 case_node_ptr pnode;
6156 /* If the lower bound of this node is the lowest value in the index type,
6157 we need not test it. */
6159 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
6160 return 1;
6162 /* If this node has a left branch, the value at the left must be less
6163 than that at this node, so it cannot be bounded at the bottom and
6164 we need not bother testing any further. */
6166 if (node->left)
6167 return 0;
6169 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
6170 node->low, integer_one_node));
6172 /* If the subtraction above overflowed, we can't verify anything.
6173 Otherwise, look for a parent that tests our value - 1. */
6175 if (! tree_int_cst_lt (low_minus_one, node->low))
6176 return 0;
6178 for (pnode = node->parent; pnode; pnode = pnode->parent)
6179 if (tree_int_cst_equal (low_minus_one, pnode->high))
6180 return 1;
6182 return 0;
6183 }
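/* For example, with the ranges 10..19 and 20..29, the node for
   20..29 hangs to the right of the node for 10..19; that parent
   already tested the index against 19 (= 20 - 1) on the path taken
   here, so no explicit `>= 20' test needs to be emitted.  */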
6185 /* Search the parent sections of the case node tree
6186 to see if a test for the upper bound of NODE would be redundant.
6187 INDEX_TYPE is the type of the index expression.
6189 The instructions to generate the case decision tree are
6190 output in the same order as nodes are processed, so it is
6191 known that if a parent node checks the range of the current
6192 node plus one, the current node is bounded at its upper
6193 span. Thus the test would be redundant. */
6195 static int
6196 node_has_high_bound (node, index_type)
6197 case_node_ptr node;
6198 tree index_type;
6199 {
6200 tree high_plus_one;
6201 case_node_ptr pnode;
6203 /* If there is no upper bound, obviously no test is needed. */
6205 if (TYPE_MAX_VALUE (index_type) == NULL)
6206 return 1;
6208 /* If the upper bound of this node is the highest value in the type
6209 of the index expression, we need not test against it. */
6211 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
6212 return 1;
6214 /* If this node has a right branch, the value at the right must be greater
6215 than that at this node, so it cannot be bounded at the top and
6216 we need not bother testing any further. */
6218 if (node->right)
6219 return 0;
6221 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
6222 node->high, integer_one_node));
6224 /* If the addition above overflowed, we can't verify anything.
6225 Otherwise, look for a parent that tests our value + 1. */
6227 if (! tree_int_cst_lt (node->high, high_plus_one))
6228 return 0;
6230 for (pnode = node->parent; pnode; pnode = pnode->parent)
6231 if (tree_int_cst_equal (high_plus_one, pnode->low))
6232 return 1;
6234 return 0;
6235 }
6237 /* Search the parent sections of the
6238 case node tree to see if both tests for the upper and lower
6239 bounds of NODE would be redundant. */
6241 static int
6242 node_is_bounded (node, index_type)
6243 case_node_ptr node;
6244 tree index_type;
6245 {
6246 return (node_has_low_bound (node, index_type)
6247 && node_has_high_bound (node, index_type));
6248 }
6250 /* Emit an unconditional jump to LABEL unless it would be dead code. */
6252 static void
6253 emit_jump_if_reachable (label)
6254 rtx label;
6255 {
6256 if (GET_CODE (get_last_insn ()) != BARRIER)
6257 emit_jump (label);
6258 }
6260 /* Emit step-by-step code to select a case for the value of INDEX.
6261 The decision tree thus generated follows the form of the
6262 case-node binary tree NODE, whose nodes represent test conditions.
6263 INDEX_TYPE is the type of the index of the switch.
6265 Care is taken to prune redundant tests from the decision tree
6266 by detecting any boundary conditions already checked by
6267 emitted rtx. (See node_has_high_bound, node_has_low_bound
6268 and node_is_bounded, above.)
6270 Where the test conditions can be shown to be redundant we emit
6271 an unconditional jump to the target code. As a further
6272 optimization, the subordinates of a tree node are examined to
6273 check for bounded nodes. In this case conditional and/or
6274 unconditional jumps as a result of the boundary check for the
6275 current node are arranged to target the subordinates' associated
6276 code for out-of-bound conditions on the current node.
6278 We can assume that when control reaches the code generated here,
6279 the index value has already been compared with the parents
6280 of this node, and determined to be on the same side of each parent
6281 as this node is. Thus, if this node tests for the value 51,
6282 and a parent tested for 52, we don't need to consider
6283 the possibility of a value greater than 51. If another parent
6284 tests for the value 50, then this node need not test anything. */
6286 static void
6287 emit_case_nodes (index, node, default_label, index_type)
6288 rtx index;
6289 case_node_ptr node;
6290 rtx default_label;
6291 tree index_type;
6293 /* If INDEX has an unsigned type, we must make unsigned branches. */
6294 int unsignedp = TREE_UNSIGNED (index_type);
6295 typedef rtx rtx_function ();
6296 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
6297 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
6298 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
6299 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
6300 enum machine_mode mode = GET_MODE (index);
6302 /* See if our parents have already tested everything for us.
6303 If they have, emit an unconditional jump for this node. */
6304 if (node_is_bounded (node, index_type))
6305 emit_jump (label_rtx (node->code_label));
6307 else if (tree_int_cst_equal (node->low, node->high))
6309 /* Node is single valued. First see if the index expression matches
6310 this node and then check our children, if any. */
6312 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
6313 label_rtx (node->code_label), unsignedp);
6315 if (node->right != 0 && node->left != 0)
6317 /* This node has children on both sides.
6318 Dispatch to one side or the other
6319 by comparing the index value with this node's value.
6320 If one subtree is bounded, check that one first,
6321 so we can avoid real branches in the tree. */
6323 if (node_is_bounded (node->right, index_type))
6325 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6326 VOIDmode, 0),
6327 GT, NULL_RTX, mode, unsignedp, 0);
6329 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
6330 emit_case_nodes (index, node->left, default_label, index_type);
6333 else if (node_is_bounded (node->left, index_type))
6335 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6336 VOIDmode, 0),
6337 LT, NULL_RTX, mode, unsignedp, 0);
6338 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
6339 emit_case_nodes (index, node->right, default_label, index_type);
6342 else
6344 /* Neither node is bounded. First distinguish the two sides;
6345 then emit the code for one side at a time. */
6347 tree test_label
6348 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6350 /* See if the value is on the right. */
6351 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6352 VOIDmode, 0),
6353 GT, NULL_RTX, mode, unsignedp, 0);
6354 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
6356 /* Value must be on the left.
6357 Handle the left-hand subtree. */
6358 emit_case_nodes (index, node->left, default_label, index_type);
6359 /* If left-hand subtree does nothing,
6360 go to default. */
6361 emit_jump_if_reachable (default_label);
6363 /* Code branches here for the right-hand subtree. */
6364 expand_label (test_label);
6365 emit_case_nodes (index, node->right, default_label, index_type);
6369 else if (node->right != 0 && node->left == 0)
6371 /* Here we have a right child but no left, so we issue a conditional
6372 branch to default and process the right child.
6374 Omit the conditional branch to default if it would avoid only one
6375 right child; it costs too much space to save so little time. */
6377 if (node->right->right || node->right->left
6378 || !tree_int_cst_equal (node->right->low, node->right->high))
6380 if (!node_has_low_bound (node, index_type))
6382 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6383 VOIDmode, 0),
6384 LT, NULL_RTX, mode, unsignedp, 0);
6385 emit_jump_insn ((*gen_blt_pat) (default_label));
6388 emit_case_nodes (index, node->right, default_label, index_type);
6390 else
6391 /* We cannot process node->right normally
6392 since we haven't ruled out the numbers less than
6393 this node's value. So handle node->right explicitly. */
6394 do_jump_if_equal (index,
6395 expand_expr (node->right->low, NULL_RTX,
6396 VOIDmode, 0),
6397 label_rtx (node->right->code_label), unsignedp);
6400 else if (node->right == 0 && node->left != 0)
6402 /* Just one subtree, on the left. */
6404 #if 0 /* The following code and comment were formerly part
6405 of the condition here, but they didn't work
6406 and I don't understand what the idea was. -- rms. */
6407 /* If our "most probable entry" is less probable
6408 than the default label, emit a jump to
6409 the default label using condition codes
6410 already lying around. With no right branch,
6411 a branch-greater-than will get us to the default
6412 label correctly. */
6413 if (use_cost_table
6414 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
6416 #endif /* 0 */
6417 if (node->left->left || node->left->right
6418 || !tree_int_cst_equal (node->left->low, node->left->high))
6420 if (!node_has_high_bound (node, index_type))
6422 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6423 VOIDmode, 0),
6424 GT, NULL_RTX, mode, unsignedp, 0);
6425 emit_jump_insn ((*gen_bgt_pat) (default_label));
6428 emit_case_nodes (index, node->left, default_label, index_type);
6430 else
6431 /* We cannot process node->left normally
6432 since we haven't ruled out the numbers greater than
6433 this node's value. So handle node->left explicitly. */
6434 do_jump_if_equal (index,
6435 expand_expr (node->left->low, NULL_RTX,
6436 VOIDmode, 0),
6437 label_rtx (node->left->code_label), unsignedp);
6440 else
6442 /* Node is a range. These cases are very similar to those for a single
6443 value, except that we do not start by testing whether this node
6444 is the one to branch to. */
6446 if (node->right != 0 && node->left != 0)
6448 /* Node has subtrees on both sides.
6449 If the right-hand subtree is bounded,
6450 test for it first, since we can go straight there.
6451 Otherwise, we need to make a branch in the control structure,
6452 then handle the two subtrees. */
6453 tree test_label = 0;
6455 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6456 VOIDmode, 0),
6457 GT, NULL_RTX, mode, unsignedp, 0);
6459 if (node_is_bounded (node->right, index_type))
6460 /* Right hand node is fully bounded so we can eliminate any
6461 testing and branch directly to the target code. */
6462 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
6463 else
6465 /* Right hand node requires testing.
6466 Branch to a label where we will handle it later. */
6468 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6469 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
6472 /* Value belongs to this node or to the left-hand subtree. */
6474 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
6475 GE, NULL_RTX, mode, unsignedp, 0);
6476 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
6478 /* Handle the left-hand subtree. */
6479 emit_case_nodes (index, node->left, default_label, index_type);
6481 /* If right node had to be handled later, do that now. */
6483 if (test_label)
6485 /* If the left-hand subtree fell through,
6486 don't let it fall into the right-hand subtree. */
6487 emit_jump_if_reachable (default_label);
6489 expand_label (test_label);
6490 emit_case_nodes (index, node->right, default_label, index_type);
6494 else if (node->right != 0 && node->left == 0)
6496 /* Deal with values to the left of this node,
6497 if they are possible. */
6498 if (!node_has_low_bound (node, index_type))
6500 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
6501 VOIDmode, 0),
6502 LT, NULL_RTX, mode, unsignedp, 0);
6503 emit_jump_insn ((*gen_blt_pat) (default_label));
6506 /* Value belongs to this node or to the right-hand subtree. */
6508 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6509 VOIDmode, 0),
6510 LE, NULL_RTX, mode, unsignedp, 0);
6511 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
6513 emit_case_nodes (index, node->right, default_label, index_type);
6516 else if (node->right == 0 && node->left != 0)
6518 /* Deal with values to the right of this node,
6519 if they are possible. */
6520 if (!node_has_high_bound (node, index_type))
6522 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6523 VOIDmode, 0),
6524 GT, NULL_RTX, mode, unsignedp, 0);
6525 emit_jump_insn ((*gen_bgt_pat) (default_label));
6528 /* Value belongs to this node or to the left-hand subtree. */
6530 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
6531 GE, NULL_RTX, mode, unsignedp, 0);
6532 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
6534 emit_case_nodes (index, node->left, default_label, index_type);
6537 else
6539 /* Node has no children so we check low and high bounds to remove
6540 redundant tests. Only one of the bounds can exist,
6541 since otherwise this node is bounded--a case tested already. */
6543 if (!node_has_high_bound (node, index_type))
6545 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6546 VOIDmode, 0),
6547 GT, NULL_RTX, mode, unsignedp, 0);
6548 emit_jump_insn ((*gen_bgt_pat) (default_label));
6551 if (!node_has_low_bound (node, index_type))
6553 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
6554 VOIDmode, 0),
6555 LT, NULL_RTX, mode, unsignedp, 0);
6556 emit_jump_insn ((*gen_blt_pat) (default_label));
6559 emit_jump (label_rtx (node->code_label));
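/* Schematically, for a switch on I with the cases 1, 2 and 3 and no
   bounds known, the balanced tree is rooted at 2 and the code above
   emits the equivalent of:

	if (i == 2) goto L2;
	if (i > 2) goto T;
	if (i == 1) goto L1;
	goto default;
     T: if (i == 3) goto L3;
	goto default;

   Each leaf emits only its equality test; the trailing jumps to
   default come from emit_jump_if_reachable in the callers.  */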
6564 /* These routines are used by the loop unrolling code. They copy BLOCK trees
6565 so that the debugging info will be correct for the unrolled loop. */
6567 /* Indexed by block number, contains a pointer to the N'th block node. */
6569 static tree *block_vector;
6571 void
6572 find_loop_tree_blocks ()
6573 {
6574 tree block = DECL_INITIAL (current_function_decl);
6576 block_vector = identify_blocks (block, get_insns ());
6577 }
6579 void
6580 unroll_block_trees ()
6581 {
6582 tree block = DECL_INITIAL (current_function_decl);
6584 reorder_blocks (block_vector, block, get_insns ());
6585 }