/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-5, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
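
/* As an illustrative sketch (not part of this file), a front end might
   drive those entry points for `if (c) then_stmt; else else_stmt;'
   roughly as follows -- the exact parser actions vary by language:  */
#if 0
  expand_start_cond (c, 0);     /* emit test of C; jump taken if false */
  /* ... expand THEN_STMT here ... */
  expand_start_else ();         /* jump to endif; define the else label */
  /* ... expand ELSE_STMT here ... */
  expand_end_cond ();           /* define the endif label */
#endif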
36 #include "config.h"
38 #include <stdio.h>
39 #include <ctype.h>
41 #include "rtl.h"
42 #include "tree.h"
43 #include "flags.h"
44 #include "function.h"
45 #include "insn-flags.h"
46 #include "insn-config.h"
47 #include "insn-codes.h"
48 #include "expr.h"
49 #include "hard-reg-set.h"
50 #include "obstack.h"
51 #include "loop.h"
52 #include "recog.h"
53 #include "machmode.h"
55 #include "bytecode.h"
56 #include "bc-typecd.h"
57 #include "bc-opcode.h"
58 #include "bc-optab.h"
59 #include "bc-emit.h"
61 #define obstack_chunk_alloc xmalloc
62 #define obstack_chunk_free free
63 struct obstack stmt_obstack;
/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars whose address is an offset from the argument pointer
   will be needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif

extern void (*interim_eh_hook)  PROTO((tree));
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
struct case_node
{
  struct case_node *left;       /* Left son in binary tree */
  struct case_node *right;      /* Right son in binary tree; also node chain */
  struct case_node *parent;     /* Parent of node in binary tree */
  tree low;                     /* Lowest index value for this label */
  tree high;                    /* Highest index value for this label */
  tree code_label;              /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
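
/* For example (a sketch, not live data), after the labels of

     switch (i) { case 1: ... case 3 ... 4: ... default: ... }

   have been seen, the case_list chain holds two nodes linked through
   RIGHT, in ascending order:

     {low=1, high=1}  ->  {low=3, high=4}

   The GNU C range label contributes to num_ranges, and the default
   label is kept separately in default_label (see `struct nesting'
   below).  */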
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } cond;
      /* For loops.  */
      struct
        {
          /* Label at the top of the loop; place to loop back to.  */
          rtx start_label;
          /* Label at the end of the whole construct.  */
          rtx end_label;
          /* Label before a jump that branches to the end of the whole
             construct.  This is where destructors go if any.  */
          rtx alt_end_label;
          /* Label for `continue' statement to jump to;
             this is in front of the stepper of the loop.  */
          rtx continue_label;
        } loop;
      /* For variable binding contours.  */
      struct
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  Complemented by
             bc_stack_level (see below) when generating bytecodes.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0 (was empty_cleanup_list),
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Number of function calls seen, as of start of this block.  */
          int function_call_count;
          /* Bytecode specific: stack level to restore stack to on exit.  */
          int bc_stack_level;
        } block;
      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */
      struct
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* For bytecodes, the case table is in-lined right in the code.
             A label is needed for skipping over this block.  It is only
             used when generating bytecodes.  */
          rtx skip_label;
          /* A list of case labels, kept in ascending order by value
             as the list is built.
             During expand_end_case, this list may be rearranged into a
             nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Number of range exprs in case statement.  */
          int num_ranges;
          /* Name of this kind of statement, for warnings.  */
          char *printname;
          /* Nonzero if a case label has been seen in this case stmt.  */
          char seenlabel;
        } case_stmt;
    } data;
};
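
/* To illustrate (sketch only): while expanding

     while (c)          -- pushes a `loop' nesting
       {                -- pushes a `block' nesting
         if (p)         -- pushes a `cond' nesting
           break;
       }

   nesting_stack points at the cond entry, whose `all' link leads to the
   block entry and then to the loop entry, while cond_stack, block_stack
   and loop_stack below each point at their own entry.  The `break' is
   expanded against the innermost entry of loop_stack.  */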
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)                                 \
do { struct nesting *target = STACK;                    \
     struct nesting *this;                              \
     do { this = nesting_stack;                         \
          if (loop_stack == this)                       \
            loop_stack = loop_stack->next;              \
          if (cond_stack == this)                       \
            cond_stack = cond_stack->next;              \
          if (block_stack == this)                      \
            block_stack = block_stack->next;            \
          if (stack_block_stack == this)                \
            stack_block_stack = stack_block_stack->next; \
          if (case_stack == this)                       \
            case_stack = case_stack->next;              \
          nesting_depth = nesting_stack->depth - 1;     \
          nesting_stack = this->all;                    \
          obstack_free (&stmt_obstack, this); }         \
   while (this != target); } while (0)
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
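
/* An illustrative source-level case (GNU C, sketch only) where a fixup
   is required:

     {
       int vla[n];      -- this contour saves a stack level
       if (fail)
         goto out;      -- `out' not yet defined, so expand_goto cannot
                           know what stack level to restore; expand_fixup
                           records the jump here instead
       ...
     }                  -- exiting the contour runs fixup_gotos, which
                           inserts the stack restore before the jump
   out: ;
*/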
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;

  /* Bytecode specific members follow */

  /* The label that this jump is jumping to, or 0 for break, continue
     or return.  */
  struct bc_label *bc_target;

  /* The label we use for the fixup patch */
  struct bc_label *label;

  /* True (non-0) if fixup has been handled */
  int bc_handled:1;

  /* Like stack_level above, except refers to the interpreter stack */
  int bc_stack_level;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
static void expand_goto_internal        PROTO((tree, rtx, rtx));
static void bc_expand_goto_internal     PROTO((enum bytecode_opcode,
                                               struct bc_label *, tree));
static int expand_fixup                 PROTO((tree, rtx, rtx));
static void bc_expand_fixup             PROTO((enum bytecode_opcode,
                                               struct bc_label *, int));
static void fixup_gotos                 PROTO((struct nesting *, rtx, tree,
                                               rtx, int));
static void bc_fixup_gotos              PROTO((struct nesting *, int, tree,
                                               rtx, int));
static void bc_expand_start_cond        PROTO((tree, int));
static void bc_expand_end_cond          PROTO((void));
static void bc_expand_start_else        PROTO((void));
static void bc_expand_end_loop          PROTO((void));
static void bc_expand_end_bindings      PROTO((tree, int, int));
static void bc_expand_decl              PROTO((tree, tree));
static void bc_expand_variable_local_init PROTO((tree));
static void bc_expand_decl_init         PROTO((tree));
static void expand_null_return_1        PROTO((rtx, int));
static void expand_value_return         PROTO((rtx));
static int tail_recursion_args          PROTO((tree, tree));
static void expand_cleanups             PROTO((tree, tree, int, int));
static void bc_expand_start_case        PROTO((struct nesting *, tree,
                                               tree, char *));
static int bc_pushcase                  PROTO((tree, tree));
static void bc_check_for_full_enumeration_handling PROTO((tree));
static void bc_expand_end_case          PROTO((tree));
static void do_jump_if_equal            PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs          PROTO((case_node_ptr));
static void group_case_nodes            PROTO((case_node_ptr));
static void balance_case_nodes          PROTO((case_node_ptr *,
                                               case_node_ptr));
static int node_has_low_bound           PROTO((case_node_ptr, tree));
static int node_has_high_bound          PROTO((case_node_ptr, tree));
static int node_is_bounded              PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable      PROTO((rtx));
static void emit_case_nodes             PROTO((rtx, case_node_ptr, rtx, tree));

extern rtx bc_allocate_local ();
extern rtx bc_allocate_variable_array ();
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}
void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}
void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  if (!output_bytecode)
    {
      last_insn = get_last_insn ();
      if (!optimize
          && (GET_CODE (last_insn) == CODE_LABEL
              || (GET_CODE (last_insn) == NOTE
                  && prev_real_insn (last_insn) == 0)))
        emit_insn (gen_nop ());
    }
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}
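
/* Usage sketch (illustrative): because the CODE_LABEL is created on
   first demand, a goto seen before its label still shares the same rtx:

     emit_jump (label_rtx (decl));   ...later...   emit_label (label_rtx (decl));

   `expand_label' and `expand_goto' below use it exactly this way.  */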
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  if (output_bytecode)
    {
      bc_expand_expr (exp);
      bc_emit_instruction (jumpP);
    }
  else
    {
      rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
      x = convert_memory_address (Pmode, x);
#endif

      emit_queue ();
      do_pending_stack_adjust ();
      emit_indirect_jump (x);
    }
}
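
/* Source-level sketch (GNU C) of what lands here:

     void *tab[] = { &&l0, &&l1 };      -- label-value extension
     goto *tab[i];                      -- front end passes `tab[i]' as EXP
   l0: ...
   l1: ...

   For bytecode the address is pushed and a jumpP is emitted; for rtl the
   address is forced into Pmode and an indirect jump is emitted.  */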
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  if (output_bytecode)
    {
      if (! DECL_RTL (label))
        DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
      if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
        error ("multiply defined label");
      return;
    }

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
        = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
                       &nonlocal_goto_stack_level,
                       PREV_INSN (tail_recursion_reentry));
    }
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  if (output_bytecode)
    {
      expand_goto_internal (label, label_rtx (label), NULL_RTX);
      return;
    }

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
         case the virtual stack vars register gets instantiated differently
         in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
                                      copy_rtx (p->nonlocal_goto_handler_slot),
                                      copy_rtx (p->nonlocal_goto_stack_level),
                                      label_ref));
      else
#endif
        {
          rtx addr;

          /* Restore frame pointer for containing function.
             This sets the actual hard register used for the frame pointer
             to the location of the function's incoming static chain info.
             The non-local goto handler will then adjust it to contain the
             proper value and reload the argument pointer, if needed.  */
          emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

          /* We have now loaded the frame pointer hardware register with
             the address that corresponds to the start of the virtual
             stack vars.  So replace virtual_stack_vars_rtx in all
             addresses we use with hard_frame_pointer_rtx.  */

          /* Get addr of containing function's current nonlocal goto handler,
             which will do any cleanups and then jump to the label.  */
          addr = copy_rtx (p->nonlocal_goto_handler_slot);
          temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
                                           hard_frame_pointer_rtx));

          /* Restore the stack pointer.  Note this uses fp just restored.  */
          addr = p->nonlocal_goto_stack_level;
          if (addr)
            addr = replace_rtx (copy_rtx (addr),
                                virtual_stack_vars_rtx,
                                hard_frame_pointer_rtx);

          emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

          /* Put in the static chain register the nonlocal label address.  */
          emit_move_insn (static_chain_rtx, label_ref);
          /* USE of hard_frame_pointer_rtx added for consistency; not clear if
             really needed.  */
          emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
          emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
          emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
          emit_indirect_jump (temp);
        }
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
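
/* The nonlocal branch above is reached for GNU C nested functions, e.g.
   (source-level sketch):

     int f ()
     {
       __label__ out;                   -- makes `out' reachable from g
       void g () { goto out; }          -- goto crosses function boundaries
       g ();
       out: return 1;
     }

   Expanding the `goto out' inside g restores f's frame and stack
   pointers from the saved slots and jumps through f's nonlocal goto
   handler, as done insn by insn in the hand-expanded arm above.  */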
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a CODE_LABEL.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */
static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  /* NOTICE!  If a bytecode instruction other than `jump' is needed,
     then the caller has to call bc_expand_goto_internal()
     directly.  This is rather an exceptional case, and there aren't
     that many places where this is necessary.  */
  if (output_bytecode)
    {
      bc_expand_goto_internal (jump, BYTECODE_BC_LABEL (label), body);
      return;
    }

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing insn-uids.)
         Then restore the outermost stack level within that block,
         and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
        {
          if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
            break;
          if (block->data.block.stack_level != 0)
            stack_level = block->data.block.stack_level;
          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
            }
        }

      if (stack_level)
        {
          /* Ensure stack adjust isn't done by emit_jump, as this would clobber
             the stack pointer.  This one should be deleted as dead by flow.  */
          clear_pending_stack_adjust ();
          do_pending_stack_adjust ();
          emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
        }

      if (body != 0 && DECL_TOO_LATE (body))
        error ("jump to `%s' invalidly jumps into binding contour",
               IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
         of at least one goto that has no fixup.  */
      if (body != 0)
        TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate a jump with OPCODE to the given bytecode LABEL which is
   found within BODY.  */

static void
bc_expand_goto_internal (opcode, label, body)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     tree body;
{
  struct nesting *block;
  int stack_level = -1;

  /* If the label is defined, adjust the stack as necessary.
     If it's not defined, we have to push the reference on the
     fixup list.  */

  if (label->defined)
    {

      /* Find the innermost pending block that contains the label.
         (Check containment by comparing bytecode uids.)  Then restore the
         outermost stack level within that block.  */

      for (block = block_stack; block; block = block->next)
        {
          if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)
            break;
          if (block->data.block.bc_stack_level)
            stack_level = block->data.block.bc_stack_level;

          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
            }
        }

      /* Restore the stack level.  If we need to adjust the stack, we
         must do so after the jump, since the jump may depend on
         what's on the stack.  Thus, any stack-modifying conditional
         jumps (these are the only ones that rely on what's on the
         stack) go into the fixup list.  */

      if (stack_level >= 0
          && stack_depth != stack_level
          && opcode != jump)

        bc_expand_fixup (opcode, label, stack_level);
      else
        {
          if (stack_level >= 0)
            bc_adjust_stack (stack_depth - stack_level);

          if (body && DECL_BIT_FIELD (body))
            error ("jump to `%s' invalidly jumps into binding contour",
                   IDENTIFIER_POINTER (DECL_NAME (body)));

          /* Emit immediate jump */
          bc_emit_bytecode (opcode);
          bc_emit_bytecode_labelref (label);

#ifdef DEBUG_PRINT_CODE
          fputc ('\n', stderr);
#endif
        }
    }
  else
    /* Put goto in the fixup list */
    bc_expand_fixup (opcode, label, stack_level);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */
static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
          || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
           && (rtl_label == loop_stack->data.loop.start_label
               || rtl_label == loop_stack->data.loop.end_label
               || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
         If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
        next_block = next_block->all;

      if (next_block)
        return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
         which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
        if (block == next_block)
          next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
        || block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
        = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
         after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
         doing the stack adjust here isn't useful.  Doing it with our
         settings at that location isn't useful either.  Let's hope
         someone does it!  */
      if (last_insn == 0)
        do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
         NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
         this point.  The notes will encapsulate any and all fixup
         code which we might later insert at this point in the insn
         stream.  Also, the BLOCK node will be the parent (i.e. the
         `SUPERBLOCK') of any other BLOCK nodes which we might create
         later on when we are expanding the fixup code.  */

      {
        register rtx original_before_jump
          = last_insn ? last_insn : get_last_insn ();

        start_sequence ();
        pushlevel (0);
        fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
        last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
        fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now! */
        end_sequence ();
        emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
        = (((block->data.block.outer_cleanups
#if 0
             && block->data.block.outer_cleanups != empty_cleanup_list
#endif
             )
            || block->data.block.cleanups)
           ? tree_cons (NULL_TREE, block->data.block.cleanups,
                        block->data.block.outer_cleanups)
           : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
   Make the fixup restore the stack level to STACK_LEVEL.  */

static void
bc_expand_fixup (opcode, label, stack_level)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     int stack_level;
{
  struct goto_fixup *fixup
    = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));

  fixup->label = bc_get_bytecode_label ();
  fixup->bc_target = label;
  fixup->bc_stack_level = stack_level;
  fixup->bc_handled = FALSE;

  fixup->next = goto_fixup_chain;
  goto_fixup_chain = fixup;

  /* Insert a jump to the fixup code */
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (fixup->label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  if (output_bytecode)
    {
      /* ??? The second arg is the bc stack level, which is not the same
         as STACK_LEVEL.  I have no idea what should go here, so I'll
         just pass 0.  */
      bc_fixup_gotos (thisblock, 0, cleanup_list, first_insn, dont_jump_in);
      return;
    }

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev != 0)
            prev->next = f->next;
        }
      /* Has this fixup's target label been defined?
         If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
        {
          register rtx cleanup_insns;

          /* Get the first non-label after the label
             this goto jumps to.  If that's before this scope begins,
             we don't have a jump into the scope.  */
          rtx after_label = f->target_rtl;
          while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
            after_label = NEXT_INSN (after_label);

          /* If this fixup jumped into this contour from before the beginning
             of this contour, report an error.  */
          /* ??? Bug: this does not detect jumping in through intermediate
             blocks that have stack levels or cleanups.
             It detects only a problem with the innermost block
             around the label.  */
          if (f->target != 0
              && (dont_jump_in || stack_level || cleanup_list)
              /* If AFTER_LABEL is 0, it means the jump goes to the end
                 of the rtl, which means it jumps into this scope.  */
              && (after_label == 0
                  || INSN_UID (first_insn) < INSN_UID (after_label))
              && INSN_UID (first_insn) > INSN_UID (f->before_jump)
              && ! DECL_ERROR_ISSUED (f->target))
            {
              error_with_decl (f->target,
                               "label `%s' used before containing binding contour");
              /* Prevent multiple errors for one label.  */
              DECL_ERROR_ISSUED (f->target) = 1;
            }

          /* We will expand the cleanups into a sequence of their own and
             then later on we will attach this new sequence to the insn
             stream just ahead of the actual jump insn.  */

          start_sequence ();

          /* Temporarily restore the lexical context where we will
             logically be inserting the fixup code.  We do this for the
             sake of getting the debugging information right.  */

          pushlevel (0);
          set_block (f->context);

          /* Expand the cleanups for blocks this jump exits.  */
          if (f->cleanup_list_list)
            {
              tree lists;
              for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
                /* Marked elements correspond to blocks that have been closed.
                   Do their cleanups.  */
                if (TREE_ADDRESSABLE (lists)
                    && TREE_VALUE (lists) != 0)
                  {
                    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
                    /* Pop any pushes done in the cleanups,
                       in case function is about to return.  */
                    do_pending_stack_adjust ();
                  }
            }

          /* Restore stack level for the biggest contour that this
             jump jumps out of.  */
          if (f->stack_level)
            emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

          /* Finish up the sequence containing the insns which implement the
             necessary cleanups, and then attach that whole sequence to the
             insn stream just ahead of the actual jump insn.  Attaching it
             at that point insures that any cleanups which are in fact
             implicit C++ object destructions (which must be executed upon
             leaving the block) appear (to the debugger) to be taking place
             in an area of the generated code where the object(s) being
             destructed are still "in scope".  */

          cleanup_insns = get_insns ();
          poplevel (1, 0, 0);

          end_sequence ();
          emit_insns_after (cleanup_insns, f->before_jump);

          f->before_jump = 0;
        }
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
        && PREV_INSN (f->target_rtl) == 0
        /* Label has still not appeared.  If we are exiting a block with
           a stack level to restore, that started before the fixup,
           mark this stack level as needing restoration
           when the fixup is later finalized.  */
        && thisblock != 0
        /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
           means the label is undefined.  That's erroneous, but possible.  */
        && (thisblock->data.block.block_start_count
            <= f->block_start_count))
      {
        tree lists = f->cleanup_list_list;
        rtx cleanup_insns;

        for (; lists; lists = TREE_CHAIN (lists))
          /* If the following elt. corresponds to our containing block
             then the elt. must be for this block.  */
          if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
            {
              start_sequence ();
              pushlevel (0);
              set_block (f->context);
              expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
              cleanup_insns = get_insns ();
              poplevel (1, 0, 0);
              end_sequence ();
              f->before_jump
                = emit_insns_after (cleanup_insns, f->before_jump);

              f->cleanup_list_list = TREE_CHAIN (lists);
            }

        if (stack_level)
          f->stack_level = stack_level;
      }
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   Note: STACK_DEPTH is not altered.

   The arguments are currently not used in the bytecode compiler, but we may
   need them one day for languages other than C.

   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
static void
bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     int stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;
  int saved_stack_depth;

  /* F is the fixup we are considering; PREV is the previous one.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev)
            prev->next = f->next;
        }

      /* Emit code to restore the stack and continue */
      bc_emit_bytecode_labeldef (f->label);

      /* Save stack_depth across call, since bc_adjust_stack () will alter
         the perceived stack depth via the instructions generated.  */

      if (f->bc_stack_level >= 0)
        {
          saved_stack_depth = stack_depth;
          bc_adjust_stack (stack_depth - f->bc_stack_level);
          stack_depth = saved_stack_depth;
        }

      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (f->bc_target);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
    }

  goto_fixup_chain = NULL;
}
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */
void
expand_asm (body)
     tree body;
{
  if (output_bytecode)
    {
      error ("`asm' is invalid when generating bytecode");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
                      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
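
/* For example (sketch), `asm ("nop");' arrives here with BODY being the
   STRING_CST "nop", and becomes a single ASM_INPUT in the insn stream.  */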
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
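
/* A source-level sketch (operand names and the template are illustrative
   only, not from any particular machine):

     asm volatile ("add %1,%2,%0"
                   : "=r" (dst)          -- OUTPUTS, constraint string in
                                            the TREE_PURPOSE
                   : "r" (a), "r" (b)    -- INPUTS
                   : "cc", "memory");    -- CLOBBERS

   Here VOL is nonzero because of `volatile'; "cc" and "memory" receive
   the special handling visible in the clobber loop below.  */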
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  if (output_bytecode)
    {
      error ("`asm' is invalid when generating bytecode");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
        ++nclobbers;
      else if (i == -2)
        error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      tree val1;
      int j;
      int found_equal = 0;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
        return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
         if it allows any register.  Be liberal on the latter test, since
         the worst that happens if we get it wrong is we issue an error
         message.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
        switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
          {
          case '+':
            error ("output operand constraint contains `+'");
            return;

          case '=':
            found_equal = 1;
            break;

          case '?':  case '!':  case '*':  case '%':  case '&':
          case 'V':  case 'm':  case 'o':  case '<':  case '>':
          case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
          case 's':  case 'i':  case 'n':
          case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
          case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
          case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
            break;

          case 'p':  case 'g':  case 'r':
            /* Whether or not a numeric constraint allows a register is
               decided by the matching constraint, and so there is no need
               to do anything special with them.  We must handle them in
               the default case, so that we don't unnecessarily force
               operands to memory.  */
          case '0':  case '1':  case '2':  case '3':  case '4':
          default:
            allows_reg = 1;
            break;
          }

      if (! found_equal)
        {
          error ("output operand constraint lacks `='");
          return;
        }

      /* If an output operand is not a decl or indirect ref and our constraint
         allows a register, make a temporary to act as an intermediate.
         Make the asm insn write into that, then our caller will copy it to
         the real output operand.  Likewise for promoted variables.  */

      if (TREE_CODE (val) == INDIRECT_REF
          || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
              && ! (GET_CODE (DECL_RTL (val)) == REG
                    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
          || ! allows_reg)
        {
          if (! allows_reg)
            mark_addressable (TREE_VALUE (tail));

          output_rtx[i]
            = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

          if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
            error ("output number %d not directly addressable", i);
        }
      else
        {
          output_rtx[i] = assign_temp (type, 0, 0, 0);
          TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
        }
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
                  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
                  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn,
         because the ASM_INPUT would get VOIDmode
         and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
        return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
        {
          error ("hard register `%s' listed as input operand to `asm'",
                 TREE_STRING_POINTER (TREE_VALUE (tail)) );
          return;
        }

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
        switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
          {
          case '+':   case '=':
            error ("input operand constraint contains `%c'",
                   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
            return;

          case '?':  case '!':  case '*':  case '%':  case '&':
          case 'V':  case 'm':  case 'o':  case '<':  case '>':
          case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
          case 's':  case 'i':  case 'n':
          case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
          case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
          case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
            break;

          case 'p':  case 'g':  case 'r':
            /* Whether or not a numeric constraint allows a register is
               decided by the matching constraint, and so there is no need
               to do anything special with them.  We must handle them in
               the default case, so that we don't unnecessarily force
               operands to memory.  */
          case '0':  case '1':  case '2':  case '3':  case '4':
          default:
            allows_reg = 1;
            break;
          }

      if (! allows_reg)
        mark_addressable (TREE_VALUE (tail));

      XVECEXP (body, 3, i)      /* argvec */
        = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      if (CONSTANT_P (XVECEXP (body, 3, i))
          && ! general_operand (XVECEXP (body, 3, i),
                                TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)))))
        {
          if (allows_reg)
            XVECEXP (body, 3, i)
              = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                           XVECEXP (body, 3, i));
          else
            XVECEXP (body, 3, i)
              = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                                 XVECEXP (body, 3, i));
        }

      if (! allows_reg
          && (GET_CODE (XVECEXP (body, 3, i)) == REG
              || GET_CODE (XVECEXP (body, 3, i)) == SUBREG
              || GET_CODE (XVECEXP (body, 3, i)) == CONCAT))
        {
          tree type = TREE_TYPE (TREE_VALUE (tail));
          rtx memloc = assign_temp (type, 1, 1, 1);

          emit_move_insn (memloc, XVECEXP (body, 3, i));
          XVECEXP (body, 3, i) = memloc;
        }

      XVECEXP (body, 4, i)      /* constraints */
        = gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
                               ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */
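
  /* Shape of the combined insn (illustrative only): for two outputs and
     one register clobber the PARALLEL is roughly

       (parallel [(set (reg A) (asm_operands TEMPLATE "=r" 0 ARGVEC CONSTRAINTS))
                  (set (reg B) (asm_operands TEMPLATE "=r" 1 ARGVEC CONSTRAINTS))
                  (clobber (reg:QI N))])  */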
  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
        {
          XVECEXP (body, 0, i)
            = gen_rtx (SET, VOIDmode,
                       output_rtx[i],
                       gen_rtx (ASM_OPERANDS, VOIDmode,
                                TREE_STRING_POINTER (string),
                                TREE_STRING_POINTER (TREE_PURPOSE (tail)),
                                i, argvec, constraints,
                                filename, line));
          MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
        }

      /* If there are no outputs (but there are some clobbers)
         store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
        XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
        {
          char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
          int j = decode_reg_name (regname);

          if (j < 0)
            {
              if (j == -3)      /* `cc', which is not a register */
                continue;

              if (j == -4)      /* `memory', don't cache memory across asm */
                {
                  XVECEXP (body, 0, i++)
                    = gen_rtx (CLOBBER, VOIDmode,
                               gen_rtx (MEM, BLKmode,
                                        gen_rtx (SCRATCH, VOIDmode, 0)));
                  continue;
                }

              /* Ignore unknown register, error already signalled.  */
              continue;
            }

          /* Use QImode since that's guaranteed to clobber just one reg.  */
          XVECEXP (body, 0, i++)
            = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
        }

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */
void
expand_expr_stmt (exp)
     tree exp;
{
  if (output_bytecode)
    {
      int org_stack_depth = stack_depth;

      bc_expand_expr (exp);

      /* Restore stack depth */
      if (stack_depth < org_stack_depth)
        abort ();

      bc_emit_instruction (drop);

      last_expr_type = TREE_TYPE (exp);
      return;
    }

  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
          && !(TREE_CODE (exp) == CONVERT_EXPR
               && TREE_TYPE (exp) == void_type_node))
        warning_with_file_and_line (emit_filename, emit_lineno,
                                    "statement with no effect");
      else if (warn_unused)
        warn_if_unused_value (exp);
    }

  /* If EXP is of function type and we are expanding statements for
     value, convert it to pointer-to-function.  */
  if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
    exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);

  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp,
                                   (expr_stmts_for_value
                                    ? NULL_RTX : const0_rtx),
                                   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
        ;
      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
        copy_to_reg (last_expr_value);
      else
        {
          rtx lab = gen_label_rtx ();

          /* Compare the value with itself to reference it.  */
          emit_cmp_insn (last_expr_value, last_expr_value, EQ,
                         expand_expr (TYPE_SIZE (last_expr_type),
                                      NULL_RTX, VOIDmode, 0),
                         BLKmode, 0,
                         TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
          emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
          emit_label (lab);
        }
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}
1761 /* Warn if EXP contains any computations whose results are not used.
1762 Return 1 if a warning is printed; 0 otherwise. */
1764 int
1765 warn_if_unused_value (exp)
1766 tree exp;
1768 if (TREE_USED (exp))
1769 return 0;
1771 switch (TREE_CODE (exp))
1773 case PREINCREMENT_EXPR:
1774 case POSTINCREMENT_EXPR:
1775 case PREDECREMENT_EXPR:
1776 case POSTDECREMENT_EXPR:
1777 case MODIFY_EXPR:
1778 case INIT_EXPR:
1779 case TARGET_EXPR:
1780 case CALL_EXPR:
1781 case METHOD_CALL_EXPR:
1782 case RTL_EXPR:
1783 case WITH_CLEANUP_EXPR:
1784 case EXIT_EXPR:
1785 /* We don't warn about COND_EXPR because it may be a useful
1786 construct if either arm contains a side effect. */
1787 case COND_EXPR:
1788 return 0;
1790 case BIND_EXPR:
1791 /* For a binding, warn if no side effect within it. */
1792 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1794 case SAVE_EXPR:
1795 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1797 case TRUTH_ORIF_EXPR:
1798 case TRUTH_ANDIF_EXPR:
1799 /* In && or ||, warn if 2nd operand has no side effect. */
1800 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1802 case COMPOUND_EXPR:
1803 if (TREE_NO_UNUSED_WARNING (exp))
1804 return 0;
1805 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1806 return 1;
1807 /* Let people do `(foo (), 0)' without a warning. */
1808 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1809 return 0;
1810 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1812 case NOP_EXPR:
1813 case CONVERT_EXPR:
1814 case NON_LVALUE_EXPR:
1815 /* Don't warn about values cast to void. */
1816 if (TREE_TYPE (exp) == void_type_node)
1817 return 0;
1818 /* Don't warn about conversions not explicit in the user's program. */
1819 if (TREE_NO_UNUSED_WARNING (exp))
1820 return 0;
1821 /* Assignment to a cast usually results in a cast of a modify.
1822 Don't complain about that. There can be an arbitrary number of
1823 casts before the modify, so we must loop until we find the first
1824 non-cast expression and then test to see if that is a modify. */
1826 tree tem = TREE_OPERAND (exp, 0);
1828 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1829 tem = TREE_OPERAND (tem, 0);
1831 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1832 || TREE_CODE (tem) == CALL_EXPR)
1833 return 0;
1835 goto warn;
1837 case INDIRECT_REF:
1838 /* Don't warn about automatic dereferencing of references, since
1839 the user cannot control it. */
1840 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1841 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1842 /* ... fall through ... */
1844 default:
1845 /* Referencing a volatile value is a side effect, so don't warn. */
1846 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1847 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1848 && TREE_THIS_VOLATILE (exp))
1849 return 0;
1850 warn:
1851 warning_with_file_and_line (emit_filename, emit_lineno,
1852 "value computed is not used");
1853 return 1;
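/* Illustrative examples only (not from the sources), with -Wunused:

     f () + 1;        warns: value computed is not used
     (void) f ();     no warning: explicit cast to void
     (f (), 0);       no warning: a constant second operand of a
                      COMPOUND_EXPR is deliberately allowed  */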
1857 /* Clear out the memory of the last expression evaluated. */
1859 void
1860 clear_last_expr ()
1862 last_expr_type = 0;
1865 /* Begin a statement which will return a value.
1866 Return the RTL_EXPR for this statement expr.
1867 The caller must save that value and pass it to expand_end_stmt_expr. */
1869 tree
1870 expand_start_stmt_expr ()
1872 int momentary;
1873 tree t;
1875 /* When generating bytecode, just note down the stack depth.  */
1876 if (output_bytecode)
1877 return (build_int_2 (stack_depth, 0));
1879 /* Make the RTL_EXPR node temporary, not momentary,
1880 so that rtl_expr_chain doesn't become garbage. */
1881 momentary = suspend_momentary ();
1882 t = make_node (RTL_EXPR);
1883 resume_momentary (momentary);
1884 do_pending_stack_adjust ();
1885 start_sequence_for_rtl_expr (t);
1886 NO_DEFER_POP;
1887 expr_stmts_for_value++;
1888 return t;
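/* Illustrative sketch only (not from the sources): a front end expands
   the GNU C statement expression `({ foo (); bar (); })' roughly as

     tree t = expand_start_stmt_expr ();
     expand_expr_stmt (foo_call);
     expand_expr_stmt (bar_call);
     t = expand_end_stmt_expr (t);

   where `foo_call' and `bar_call' stand for the CALL_EXPR trees the
   front end built; T ends up as an RTL_EXPR whose value is that of
   the last statement, bar ().  */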
1891 /* Restore the previous state at the end of a statement that returns a value.
1892 Returns a tree node representing the statement's value and the
1893 insns to compute the value.
1895 The nodes of that expression have been freed by now, so we cannot use them.
1896 But we don't want to do that anyway; the expression has already been
1897 evaluated and now we just want to use the value. So generate a RTL_EXPR
1898 with the proper type and RTL value.
1900 If the last substatement was not an expression,
1901 return something with type `void'. */
1903 tree
1904 expand_end_stmt_expr (t)
1905 tree t;
1907 if (output_bytecode)
1909 int i;
1910 tree t;
1913 /* At this point, all expressions have been evaluated in order.
1914 However, all expression values have been popped when evaluated,
1915 which means we have to recover the last expression value. This is
1916 the last value removed by means of a `drop' instruction. Instead
1917 of adding code to inhibit dropping the last expression value, it
1918 is here recovered by undoing the `drop'. Since `drop' is
1919 equivalent to `adjstackSI [1]', it can be undone with `adjstackSI
1920 [-1]'. */
1922 bc_adjust_stack (-1);
1924 if (!last_expr_type)
1925 last_expr_type = void_type_node;
1927 t = make_node (RTL_EXPR);
1928 TREE_TYPE (t) = last_expr_type;
1929 RTL_EXPR_RTL (t) = NULL;
1930 RTL_EXPR_SEQUENCE (t) = NULL;
1932 /* Don't consider deleting this expr or containing exprs at tree level. */
1933 TREE_THIS_VOLATILE (t) = 1;
1935 last_expr_type = 0;
1936 return t;
1939 OK_DEFER_POP;
1941 if (last_expr_type == 0)
1943 last_expr_type = void_type_node;
1944 last_expr_value = const0_rtx;
1946 else if (last_expr_value == 0)
1947 /* There are some cases where this can happen, such as when the
1948 statement is void type. */
1949 last_expr_value = const0_rtx;
1950 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1951 /* Remove any possible QUEUED. */
1952 last_expr_value = protect_from_queue (last_expr_value, 0);
1954 emit_queue ();
1956 TREE_TYPE (t) = last_expr_type;
1957 RTL_EXPR_RTL (t) = last_expr_value;
1958 RTL_EXPR_SEQUENCE (t) = get_insns ();
1960 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1962 end_sequence ();
1964 /* Don't consider deleting this expr or containing exprs at tree level. */
1965 TREE_SIDE_EFFECTS (t) = 1;
1966 /* Propagate volatility of the actual RTL expr. */
1967 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1969 last_expr_type = 0;
1970 expr_stmts_for_value--;
1972 return t;
1975 /* Generate RTL for the start of an if-then. COND is the expression
1976 whose truth should be tested.
1978 If EXITFLAG is nonzero, this conditional is visible to
1979 `exit_something'. */
1981 void
1982 expand_start_cond (cond, exitflag)
1983 tree cond;
1984 int exitflag;
1986 struct nesting *thiscond = ALLOC_NESTING ();
1988 /* Make an entry on cond_stack for the cond we are entering. */
1990 thiscond->next = cond_stack;
1991 thiscond->all = nesting_stack;
1992 thiscond->depth = ++nesting_depth;
1993 thiscond->data.cond.next_label = gen_label_rtx ();
1994 /* Before we encounter an `else', we don't need a separate exit label
1995 unless there are supposed to be exit statements
1996 to exit this conditional. */
1997 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1998 thiscond->data.cond.endif_label = thiscond->exit_label;
1999 cond_stack = thiscond;
2000 nesting_stack = thiscond;
2002 if (output_bytecode)
2003 bc_expand_start_cond (cond, exitflag);
2004 else
2005 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
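/* Illustrative sketch only (not from the sources): for
   `if (a) s1; else if (b) s2; else s3;' a front end calls

     expand_start_cond (a, 0);
     ... expand s1 ...
     expand_start_elseif (b);
     ... expand s2 ...
     expand_start_else ();
     ... expand s3 ...
     expand_end_cond ();  */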
2008 /* Generate RTL between then-clause and the elseif-clause
2009 of an if-then-elseif-.... */
2011 void
2012 expand_start_elseif (cond)
2013 tree cond;
2015 if (cond_stack->data.cond.endif_label == 0)
2016 cond_stack->data.cond.endif_label = gen_label_rtx ();
2017 emit_jump (cond_stack->data.cond.endif_label);
2018 emit_label (cond_stack->data.cond.next_label);
2019 cond_stack->data.cond.next_label = gen_label_rtx ();
2020 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2023 /* Generate RTL between the then-clause and the else-clause
2024 of an if-then-else. */
2026 void
2027 expand_start_else ()
2029 if (cond_stack->data.cond.endif_label == 0)
2030 cond_stack->data.cond.endif_label = gen_label_rtx ();
2032 if (output_bytecode)
2034 bc_expand_start_else ();
2035 return;
2038 emit_jump (cond_stack->data.cond.endif_label);
2039 emit_label (cond_stack->data.cond.next_label);
2040 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2043 /* After calling expand_start_else, turn this "else" into an "else if"
2044 by providing another condition. */
2046 void
2047 expand_elseif (cond)
2048 tree cond;
2050 cond_stack->data.cond.next_label = gen_label_rtx ();
2051 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2054 /* Generate RTL for the end of an if-then.
2055 Pop the record for it off of cond_stack. */
2057 void
2058 expand_end_cond ()
2060 struct nesting *thiscond = cond_stack;
2062 if (output_bytecode)
2063 bc_expand_end_cond ();
2064 else
2066 do_pending_stack_adjust ();
2067 if (thiscond->data.cond.next_label)
2068 emit_label (thiscond->data.cond.next_label);
2069 if (thiscond->data.cond.endif_label)
2070 emit_label (thiscond->data.cond.endif_label);
2073 POPSTACK (cond_stack);
2074 last_expr_type = 0;
2078 /* Generate code for the start of an if-then. COND is the expression
2079 whose truth is to be tested; if EXITFLAG is nonzero this conditional
2080 is to be visible to exit_something. It is assumed that the caller
2081 has pushed the previous context on the cond stack. */
2083 static void
2084 bc_expand_start_cond (cond, exitflag)
2085 tree cond;
2086 int exitflag;
2088 struct nesting *thiscond = cond_stack;
2090 thiscond->data.case_stmt.nominal_type = cond;  /* Note: reuses the case_stmt variant of the nesting union.  */
2091 if (! exitflag)
2092 thiscond->exit_label = gen_label_rtx ();
2093 bc_expand_expr (cond);
2094 bc_emit_bytecode (xjumpifnot);
2095 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2097 #ifdef DEBUG_PRINT_CODE
2098 fputc ('\n', stderr);
2099 #endif
2102 /* Generate the label for the end of an if with
2103 no else clause.  */
2105 static void
2106 bc_expand_end_cond ()
2108 struct nesting *thiscond = cond_stack;
2110 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
2113 /* Generate code for the start of the else clause of
2114 an if-then-else. */
2116 static void
2117 bc_expand_start_else ()
2119 struct nesting *thiscond = cond_stack;
2121 thiscond->data.cond.endif_label = thiscond->exit_label;
2122 thiscond->exit_label = gen_label_rtx ();
2123 bc_emit_bytecode (jump);
2124 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2126 #ifdef DEBUG_PRINT_CODE
2127 fputc ('\n', stderr);
2128 #endif
2130 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
2133 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2134 loop should be exited by `exit_something'. This is a loop for which
2135 `expand_continue' will jump to the top of the loop.
2137 Make an entry on loop_stack to record the labels associated with
2138 this loop. */
2140 struct nesting *
2141 expand_start_loop (exit_flag)
2142 int exit_flag;
2144 register struct nesting *thisloop = ALLOC_NESTING ();
2146 /* Make an entry on loop_stack for the loop we are entering. */
2148 thisloop->next = loop_stack;
2149 thisloop->all = nesting_stack;
2150 thisloop->depth = ++nesting_depth;
2151 thisloop->data.loop.start_label = gen_label_rtx ();
2152 thisloop->data.loop.end_label = gen_label_rtx ();
2153 thisloop->data.loop.alt_end_label = 0;
2154 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2155 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2156 loop_stack = thisloop;
2157 nesting_stack = thisloop;
2159 if (output_bytecode)
2161 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2162 return thisloop;
2165 do_pending_stack_adjust ();
2166 emit_queue ();
2167 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2168 emit_label (thisloop->data.loop.start_label);
2170 return thisloop;
2173 /* Like expand_start_loop but for a loop where the continuation point
2174 (for expand_continue_loop) will be specified explicitly. */
2176 struct nesting *
2177 expand_start_loop_continue_elsewhere (exit_flag)
2178 int exit_flag;
2180 struct nesting *thisloop = expand_start_loop (exit_flag);
2181 loop_stack->data.loop.continue_label = gen_label_rtx ();
2182 return thisloop;
2185 /* Specify the continuation point for a loop started with
2186 expand_start_loop_continue_elsewhere.
2187 Use this at the point in the code to which a continue statement
2188 should jump. */
2190 void
2191 expand_loop_continue_here ()
2193 if (output_bytecode)
2195 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
2196 return;
2198 do_pending_stack_adjust ();
2199 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2200 emit_label (loop_stack->data.loop.continue_label);
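/* Illustrative sketch only (not from the sources): for
   `for (; cond; incr) body' a front end would call

     expand_start_loop_continue_elsewhere (1);
     expand_exit_loop_if_false (0, cond);
     ... expand body ...
     expand_loop_continue_here ();
     ... expand incr ...
     expand_end_loop ();

   A `continue' in the body becomes expand_continue_loop (0) and a
   `break' becomes expand_exit_loop (0).  */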
2203 /* End a loop. */
2205 static void
2206 bc_expand_end_loop ()
2208 struct nesting *thisloop = loop_stack;
2210 bc_emit_bytecode (jump);
2211 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2213 #ifdef DEBUG_PRINT_CODE
2214 fputc ('\n', stderr);
2215 #endif
2217 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
2218 POPSTACK (loop_stack);
2219 last_expr_type = 0;
2223 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2224 Pop the block off of loop_stack. */
2226 void
2227 expand_end_loop ()
2229 register rtx insn;
2230 register rtx start_label;
2231 rtx last_test_insn = 0;
2232 int num_insns = 0;
2234 if (output_bytecode)
2236 bc_expand_end_loop ();
2237 return;
2240 insn = get_last_insn ();
2241 start_label = loop_stack->data.loop.start_label;
2243 /* Mark the continue-point at the top of the loop if none elsewhere. */
2244 if (start_label == loop_stack->data.loop.continue_label)
2245 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2247 do_pending_stack_adjust ();
2249 /* If optimizing, perhaps reorder the loop. If the loop
2250 starts with a conditional exit, roll that to the end
2251 where it will optimize together with the jump back.
2253 We look for the last conditional branch to the exit that we encounter
2254 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2255 branch to the exit first, use it.
2257 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2258 because moving them is not valid. */
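/* Illustrative example only (not from the sources): a loop first
   emitted as

     start:  if (! cond) goto end;  body;  goto start;  end:

   is reordered here into

     goto test;  top:  body;  test:  if (! cond) goto end;
     goto top;  end:

   which puts the conditional exit next to the jump back so that jump
   optimization can merge the pair into a single conditional
   branch.  */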
2260 if (optimize
2261 &&
2262 ! (GET_CODE (insn) == JUMP_INSN
2263 && GET_CODE (PATTERN (insn)) == SET
2264 && SET_DEST (PATTERN (insn)) == pc_rtx
2265 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2267 /* Scan insns from the top of the loop looking for a qualified
2268 conditional exit. */
2269 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2270 insn = NEXT_INSN (insn))
2272 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2273 break;
2275 if (GET_CODE (insn) == NOTE
2276 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2277 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2278 break;
2280 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2281 num_insns++;
2283 if (last_test_insn && num_insns > 30)
2284 break;
2286 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2287 && SET_DEST (PATTERN (insn)) == pc_rtx
2288 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2289 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2290 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2291 == loop_stack->data.loop.end_label)
2292 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2293 == loop_stack->data.loop.alt_end_label)))
2294 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2295 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2296 == loop_stack->data.loop.end_label)
2297 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2298 == loop_stack->data.loop.alt_end_label)))))
2299 last_test_insn = insn;
2301 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2302 && GET_CODE (PATTERN (insn)) == SET
2303 && SET_DEST (PATTERN (insn)) == pc_rtx
2304 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2305 && ((XEXP (SET_SRC (PATTERN (insn)), 0)
2306 == loop_stack->data.loop.end_label)
2307 || (XEXP (SET_SRC (PATTERN (insn)), 0)
2308 == loop_stack->data.loop.alt_end_label)))
2309 /* Include BARRIER. */
2310 last_test_insn = NEXT_INSN (insn);
2313 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2315 /* We found one. Move everything from there up
2316 to the end of the loop, and add a jump into the loop
2317 to jump to there. */
2318 register rtx newstart_label = gen_label_rtx ();
2319 register rtx start_move = start_label;
2321 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2322 then we want to move this note also. */
2323 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2324 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2325 == NOTE_INSN_LOOP_CONT))
2326 start_move = PREV_INSN (start_move);
2328 emit_label_after (newstart_label, PREV_INSN (start_move));
2329 reorder_insns (start_move, last_test_insn, get_last_insn ());
2330 emit_jump_insn_after (gen_jump (start_label),
2331 PREV_INSN (newstart_label));
2332 emit_barrier_after (PREV_INSN (newstart_label));
2333 start_label = newstart_label;
2337 emit_jump (start_label);
2338 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2339 emit_label (loop_stack->data.loop.end_label);
2341 POPSTACK (loop_stack);
2343 last_expr_type = 0;
2346 /* Generate a jump to the current loop's continue-point.
2347 This is usually the top of the loop, but may be specified
2348 explicitly elsewhere. If not currently inside a loop,
2349 return 0 and do nothing; caller will print an error message. */
2351 int
2352 expand_continue_loop (whichloop)
2353 struct nesting *whichloop;
2355 last_expr_type = 0;
2356 if (whichloop == 0)
2357 whichloop = loop_stack;
2358 if (whichloop == 0)
2359 return 0;
2360 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2361 NULL_RTX);
2362 return 1;
2365 /* Generate a jump to exit the current loop. If not currently inside a loop,
2366 return 0 and do nothing; caller will print an error message. */
2368 int
2369 expand_exit_loop (whichloop)
2370 struct nesting *whichloop;
2372 last_expr_type = 0;
2373 if (whichloop == 0)
2374 whichloop = loop_stack;
2375 if (whichloop == 0)
2376 return 0;
2377 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2378 return 1;
2381 /* Generate a conditional jump to exit the current loop if COND
2382 evaluates to zero. If not currently inside a loop,
2383 return 0 and do nothing; caller will print an error message. */
2385 int
2386 expand_exit_loop_if_false (whichloop, cond)
2387 struct nesting *whichloop;
2388 tree cond;
2390 last_expr_type = 0;
2391 if (whichloop == 0)
2392 whichloop = loop_stack;
2393 if (whichloop == 0)
2394 return 0;
2395 if (output_bytecode)
2397 bc_expand_expr (cond);
2398 bc_expand_goto_internal (xjumpifnot,
2399 BYTECODE_BC_LABEL (whichloop->exit_label),
2400 NULL_TREE);
2402 else
2404 /* In order to handle fixups, we actually create a conditional jump
2405 around an unconditional branch to exit the loop.  If fixups are
2406 necessary, they go before the unconditional branch. */
2408 rtx label = gen_label_rtx ();
2409 rtx last_insn;
2411 do_jump (cond, NULL_RTX, label);
2412 last_insn = get_last_insn ();
2413 if (GET_CODE (last_insn) == CODE_LABEL)
2414 whichloop->data.loop.alt_end_label = last_insn;
2415 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2416 NULL_RTX);
2417 emit_label (label);
2420 return 1;
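/* Illustrative sketch only (not from the sources): the RTL branch
   above emits

       if (cond) goto around;
       ... fixup code, if any ...
       goto loop_end;
     around:

   instead of a single `if (! cond) goto loop_end', so the fixups have
   somewhere to go ahead of the unconditional branch.  */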
2423 /* Return non-zero if we should preserve sub-expressions as separate
2424 pseudos. We never do so if we aren't optimizing. We always do so
2425 if -fexpensive-optimizations.
2427 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2428 the loop may still be a small one. */
2430 int
2431 preserve_subexpressions_p ()
2433 rtx insn;
2435 if (flag_expensive_optimizations)
2436 return 1;
2438 if (optimize == 0 || loop_stack == 0)
2439 return 0;
2441 insn = get_last_insn_anywhere ();
2443 return (insn
2444 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2445 < n_non_fixed_regs * 3));
2449 /* Generate a jump to exit the current loop, conditional, binding contour
2450 or case statement. Not all such constructs are visible to this function,
2451 only those started with EXIT_FLAG nonzero. Individual languages use
2452 the EXIT_FLAG parameter to control which kinds of constructs you can
2453 exit this way.
2455 If not currently inside anything that can be exited,
2456 return 0 and do nothing; caller will print an error message. */
2458 int
2459 expand_exit_something ()
2461 struct nesting *n;
2462 last_expr_type = 0;
2463 for (n = nesting_stack; n; n = n->all)
2464 if (n->exit_label != 0)
2466 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2467 return 1;
2470 return 0;
2473 /* Generate RTL to return from the current function, with no value.
2474 (That is, we do not do anything about returning any value.) */
2476 void
2477 expand_null_return ()
2479 struct nesting *block = block_stack;
2480 rtx last_insn = 0;
2482 if (output_bytecode)
2484 bc_emit_instruction (ret);
2485 return;
2488 /* Does any pending block have cleanups? */
2490 while (block && block->data.block.cleanups == 0)
2491 block = block->next;
2493 /* If yes, use a goto to return, since that runs cleanups. */
2495 expand_null_return_1 (last_insn, block != 0);
2498 /* Generate RTL to return from the current function, with value VAL. */
2500 static void
2501 expand_value_return (val)
2502 rtx val;
2504 struct nesting *block = block_stack;
2505 rtx last_insn = get_last_insn ();
2506 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2508 /* Copy the value to the return location
2509 unless it's already there. */
2511 if (return_reg != val)
2513 #ifdef PROMOTE_FUNCTION_RETURN
2514 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2515 int unsignedp = TREE_UNSIGNED (type);
2516 enum machine_mode mode
2517 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2518 &unsignedp, 1);
2520 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2521 convert_move (return_reg, val, unsignedp);
2522 else
2523 #endif
2524 emit_move_insn (return_reg, val);
2526 if (GET_CODE (return_reg) == REG
2527 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2528 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2530 /* Does any pending block have cleanups? */
2532 while (block && block->data.block.cleanups == 0)
2533 block = block->next;
2535 /* If yes, use a goto to return, since that runs cleanups.
2536 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2538 expand_null_return_1 (last_insn, block != 0);
2541 /* Output a return with no value. If LAST_INSN is nonzero,
2542 pretend that the return takes place after LAST_INSN.
2543 If USE_GOTO is nonzero then don't use a return instruction;
2544 go to the return label instead. This causes any cleanups
2545 of pending blocks to be executed normally. */
2547 static void
2548 expand_null_return_1 (last_insn, use_goto)
2549 rtx last_insn;
2550 int use_goto;
2552 rtx end_label = cleanup_label ? cleanup_label : return_label;
2554 clear_pending_stack_adjust ();
2555 do_pending_stack_adjust ();
2556 last_expr_type = 0;
2558 /* PCC-struct return always uses an epilogue. */
2559 if (current_function_returns_pcc_struct || use_goto)
2561 if (end_label == 0)
2562 end_label = return_label = gen_label_rtx ();
2563 expand_goto_internal (NULL_TREE, end_label, last_insn);
2564 return;
2567 /* Otherwise output a simple return-insn if one is available,
2568 unless it won't do the job. */
2569 #ifdef HAVE_return
2570 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2572 emit_jump_insn (gen_return ());
2573 emit_barrier ();
2574 return;
2576 #endif
2578 /* Otherwise jump to the epilogue. */
2579 expand_goto_internal (NULL_TREE, end_label, last_insn);
2582 /* Generate RTL to evaluate the expression RETVAL and return it
2583 from the current function. */
2585 void
2586 expand_return (retval)
2587 tree retval;
2589 /* If there are any cleanups to be performed, then they will
2590 be inserted following LAST_INSN. It is desirable
2591 that the last_insn, for such purposes, should be the
2592 last insn before computing the return value. Otherwise, cleanups
2593 which call functions can clobber the return value. */
2594 /* ??? rms: I think that is erroneous, because in C++ it would
2595 run destructors on variables that might be used in the subsequent
2596 computation of the return value. */
2597 rtx last_insn = 0;
2598 register rtx val = 0;
2599 register rtx op0;
2600 tree retval_rhs;
2601 int cleanups;
2602 struct nesting *block;
2604 /* Bytecode returns are quite simple: just leave the result on the
2605 arithmetic stack. */
2606 if (output_bytecode)
2608 bc_expand_expr (retval);
2609 bc_emit_instruction (ret);
2610 return;
2613 /* If function wants no value, give it none. */
2614 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2616 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2617 emit_queue ();
2618 expand_null_return ();
2619 return;
2622 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2623 /* This is not sufficient. We also need to watch for cleanups of the
2624 expression we are about to expand. Unfortunately, we cannot know
2625 if it has cleanups until we expand it, and we want to change how we
2626 expand it depending upon if we need cleanups. We can't win. */
2627 #if 0
2628 cleanups = any_pending_cleanups (1);
2629 #else
2630 cleanups = 1;
2631 #endif
2633 if (TREE_CODE (retval) == RESULT_DECL)
2634 retval_rhs = retval;
2635 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2636 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2637 retval_rhs = TREE_OPERAND (retval, 1);
2638 else if (TREE_TYPE (retval) == void_type_node)
2639 /* Recognize tail-recursive call to void function. */
2640 retval_rhs = retval;
2641 else
2642 retval_rhs = NULL_TREE;
2644 /* Only use `last_insn' if there are cleanups which must be run. */
2645 if (cleanups || cleanup_label != 0)
2646 last_insn = get_last_insn ();
2648 /* Distribute return down conditional expr if either of the sides
2649 may involve tail recursion (see test below). This enhances the number
2650 of tail recursions we see. Don't do this always since it can produce
2651 sub-optimal code in some cases and we distribute assignments into
2652 conditional expressions when it would help. */
2654 if (optimize && retval_rhs != 0
2655 && frame_offset == 0
2656 && TREE_CODE (retval_rhs) == COND_EXPR
2657 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2658 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2660 rtx label = gen_label_rtx ();
2661 tree expr;
2663 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2664 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2665 DECL_RESULT (current_function_decl),
2666 TREE_OPERAND (retval_rhs, 1));
2667 TREE_SIDE_EFFECTS (expr) = 1;
2668 expand_return (expr);
2669 emit_label (label);
2671 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2672 DECL_RESULT (current_function_decl),
2673 TREE_OPERAND (retval_rhs, 2));
2674 TREE_SIDE_EFFECTS (expr) = 1;
2675 expand_return (expr);
2676 return;
2679 /* For tail-recursive call to current function,
2680 just jump back to the beginning.
2681 It's unsafe if any auto variable in this function
2682 has its address taken; for simplicity,
2683 require stack frame to be empty. */
2684 if (optimize && retval_rhs != 0
2685 && frame_offset == 0
2686 && TREE_CODE (retval_rhs) == CALL_EXPR
2687 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2688 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2689 /* Finish checking validity, and if valid emit code
2690 to set the argument variables for the new call. */
2691 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2692 DECL_ARGUMENTS (current_function_decl)))
2694 if (tail_recursion_label == 0)
2696 tail_recursion_label = gen_label_rtx ();
2697 emit_label_after (tail_recursion_label,
2698 tail_recursion_reentry);
2700 emit_queue ();
2701 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2702 emit_barrier ();
2703 return;
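/* Illustrative example only (not from the sources): with -O,

     int f (n) int n; { if (n == 0) return 1; return f (n - 1); }

   has an empty frame and calls itself through an ADDR_EXPR, so the
   tests above succeed and `return f (n - 1);' turns into a store of
   n - 1 into n's register plus a jump to tail_recursion_label.  */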
2705 #ifdef HAVE_return
2706 /* This optimization is safe if there are local cleanups
2707 because expand_null_return takes care of them.
2708 ??? I think it should also be safe when there is a cleanup label,
2709 because expand_null_return takes care of them, too.
2710 Any reason why not? */
2711 if (HAVE_return && cleanup_label == 0
2712 && ! current_function_returns_pcc_struct
2713 && BRANCH_COST <= 1)
2715 /* If this is return x == y; then generate
2716 if (x == y) return 1; else return 0;
2717 if we can do it with explicit return insns and
2718 branches are cheap. */
2719 if (retval_rhs)
2720 switch (TREE_CODE (retval_rhs))
2722 case EQ_EXPR:
2723 case NE_EXPR:
2724 case GT_EXPR:
2725 case GE_EXPR:
2726 case LT_EXPR:
2727 case LE_EXPR:
2728 case TRUTH_ANDIF_EXPR:
2729 case TRUTH_ORIF_EXPR:
2730 case TRUTH_AND_EXPR:
2731 case TRUTH_OR_EXPR:
2732 case TRUTH_NOT_EXPR:
2733 case TRUTH_XOR_EXPR:
2734 op0 = gen_label_rtx ();
2735 jumpifnot (retval_rhs, op0);
2736 expand_value_return (const1_rtx);
2737 emit_label (op0);
2738 expand_value_return (const0_rtx);
2739 return;
2742 #endif /* HAVE_return */
2744 /* If the result is an aggregate that is being returned in one (or more)
2745 registers, load the registers here. The compiler currently can't handle
2746 copying a BLKmode value into registers. We could put this code in a
2747 more general area (for use by everyone instead of just function
2748 call/return), but until this feature is generally usable it is kept here
2749 (and in expand_call). The value must go into a pseudo in case there
2750 are cleanups that will clobber the real return register. */
2752 if (retval_rhs != 0
2753 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2754 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2756 int i, bitpos, xbitpos;
2757 int big_endian_correction = 0;
2758 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2759 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2760 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),BITS_PER_WORD);
2761 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2762 rtx result_reg, src, dst;
2763 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2764 enum machine_mode tmpmode, result_reg_mode;
2766 /* Structures whose size is not a multiple of a word are aligned
2767 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2768 machine, this means we must skip the empty high order bytes when
2769 calculating the bit offset. */
2770 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2771 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2772 * BITS_PER_UNIT));
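/* Worked example (illustrative only): a 6-byte structure with
   UNITS_PER_WORD == 4 and BITS_PER_WORD == 32 on a BYTES_BIG_ENDIAN
   machine gives bytes % UNITS_PER_WORD == 2, so

     big_endian_correction = 32 - 2 * 8 = 16

   and the copy skips the 16 empty high-order destination bits,
   right-justifying the value as described above.  */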
2774 /* Copy the structure BITSIZE bits at a time. */
2775 for (bitpos = 0, xbitpos = big_endian_correction;
2776 bitpos < bytes * BITS_PER_UNIT;
2777 bitpos += bitsize, xbitpos += bitsize)
2779 /* We need a new destination pseudo each time xbitpos is
2780 on a word boundary and when xbitpos == big_endian_correction
2781 (the first time through). */
2782 if (xbitpos % BITS_PER_WORD == 0
2783 || xbitpos == big_endian_correction)
2785 /* Generate an appropriate register. */
2786 dst = gen_reg_rtx (word_mode);
2787 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2789 /* Clobber the destination before we move anything into it. */
2790 emit_insn (gen_rtx (CLOBBER, VOIDmode, dst));
2793 /* We need a new source operand each time bitpos is on a word
2794 boundary. */
2795 if (bitpos % BITS_PER_WORD == 0)
2796 src = operand_subword_force (result_val,
2797 bitpos / BITS_PER_WORD,
2798 BLKmode);
2800 /* Use bitpos for the source extraction (left justified) and
2801 xbitpos for the destination store (right justified). */
2802 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2803 extract_bit_field (src, bitsize,
2804 bitpos % BITS_PER_WORD, 1,
2805 NULL_RTX, word_mode,
2806 word_mode,
2807 bitsize / BITS_PER_UNIT,
2808 BITS_PER_WORD),
2809 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2812 /* Find the smallest integer mode large enough to hold the
2813 entire structure and use that mode instead of BLKmode
2814 on the USE insn for the return register. */
2815 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2816 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2817 tmpmode != MAX_MACHINE_MODE;
2818 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2820 /* Have we found a large enough mode? */
2821 if (GET_MODE_SIZE (tmpmode) >= bytes)
2822 break;
2825 /* No suitable mode found. */
2826 if (tmpmode == MAX_MACHINE_MODE)
2827 abort ();
2829 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2831 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2832 result_reg_mode = word_mode;
2833 else
2834 result_reg_mode = tmpmode;
2835 result_reg = gen_reg_rtx (result_reg_mode);
2837 /* Now that the value is in pseudos, copy it to the result reg(s). */
2838 emit_queue ();
2839 free_temp_slots ();
2840 for (i = 0; i < n_regs; i++)
2841 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2842 result_pseudos[i]);
2844 if (tmpmode != result_reg_mode)
2845 result_reg = gen_lowpart (tmpmode, result_reg);
2847 expand_value_return (result_reg);
2849 else if (cleanups
2850 && retval_rhs != 0
2851 && TREE_TYPE (retval_rhs) != void_type_node
2852 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2854 /* Calculate the return value into a pseudo reg. */
2855 val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2856 emit_queue ();
2857 /* All temporaries have now been used. */
2858 free_temp_slots ();
2859 /* Return the calculated value, doing cleanups first. */
2860 expand_value_return (val);
2862 else
2864 /* No cleanups or no hard reg used;
2865 calculate value into hard return reg. */
2866 expand_expr (retval, const0_rtx, VOIDmode, 0);
2867 emit_queue ();
2868 free_temp_slots ();
2869 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2873 /* Return 1 if the end of the generated RTX is not a barrier.
2874 This means code already compiled can drop through. */
2876 int
2877 drop_through_at_end_p ()
2879 rtx insn = get_last_insn ();
2880 while (insn && GET_CODE (insn) == NOTE)
2881 insn = PREV_INSN (insn);
2882 return insn && GET_CODE (insn) != BARRIER;
2885 /* Emit code to alter this function's formal parms for a tail-recursive call.
2886 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2887 FORMALS is the chain of decls of formals.
2888 Return 1 if this can be done;
2889 otherwise return 0 and do not emit any code. */
2891 static int
2892 tail_recursion_args (actuals, formals)
2893 tree actuals, formals;
2895 register tree a = actuals, f = formals;
2896 register int i;
2897 register rtx *argvec;
2899 /* Check that number and types of actuals are compatible
2900 with the formals. This is not always true in valid C code.
2901 Also check that no formal needs to be addressable
2902 and that all formals are scalars. */
2904 /* Also count the args. */
2906 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2908 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2909 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
2910 return 0;
2911 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2912 return 0;
2914 if (a != 0 || f != 0)
2915 return 0;
2917 /* Compute all the actuals. */
2919 argvec = (rtx *) alloca (i * sizeof (rtx));
2921 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2922 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2924 /* Find which actual values refer to current values of previous formals.
2925 Copy each of them now, before any formal is changed. */
2927 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2929 int copy = 0;
2930 register int j;
2931 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2932 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2933 { copy = 1; break; }
2934 if (copy)
2935 argvec[i] = copy_to_reg (argvec[i]);
2938 /* Store the values of the actuals into the formals. */
2940 for (f = formals, a = actuals, i = 0; f;
2941 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2943 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2944 emit_move_insn (DECL_RTL (f), argvec[i]);
2945 else
2946 convert_move (DECL_RTL (f), argvec[i],
2947 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2950 free_temp_slots ();
2951 return 1;
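/* Illustrative example only (not from the sources): for formals
   (x, y) and the tail call f (y, x), argvec[1] mentions x's register,
   and the first store (y's value into x) would clobber it before it
   is used; the copying loop above saves argvec[1] into a fresh pseudo
   with copy_to_reg before any formal is changed.  */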
2954 /* Generate the RTL code for entering a binding contour.
2955 The variables are declared one by one, by calls to `expand_decl'.
2957 EXIT_FLAG is nonzero if this construct should be visible to
2958 `exit_something'. */
2960 void
2961 expand_start_bindings (exit_flag)
2962 int exit_flag;
2964 struct nesting *thisblock = ALLOC_NESTING ();
2965 rtx note = output_bytecode ? 0 : emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2967 /* Make an entry on block_stack for the block we are entering. */
2969 thisblock->next = block_stack;
2970 thisblock->all = nesting_stack;
2971 thisblock->depth = ++nesting_depth;
2972 thisblock->data.block.stack_level = 0;
2973 thisblock->data.block.cleanups = 0;
2974 thisblock->data.block.function_call_count = 0;
2975 #if 0
2976 if (block_stack)
2978 if (block_stack->data.block.cleanups == NULL_TREE
2979 && (block_stack->data.block.outer_cleanups == NULL_TREE
2980 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2981 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2982 else
2983 thisblock->data.block.outer_cleanups
2984 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2985 block_stack->data.block.outer_cleanups);
2987 else
2988 thisblock->data.block.outer_cleanups = 0;
2989 #endif
2990 #if 1
2991 if (block_stack
2992 && !(block_stack->data.block.cleanups == NULL_TREE
2993 && block_stack->data.block.outer_cleanups == NULL_TREE))
2994 thisblock->data.block.outer_cleanups
2995 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2996 block_stack->data.block.outer_cleanups);
2997 else
2998 thisblock->data.block.outer_cleanups = 0;
2999 #endif
3000 thisblock->data.block.label_chain = 0;
3001 thisblock->data.block.innermost_stack_block = stack_block_stack;
3002 thisblock->data.block.first_insn = note;
3003 thisblock->data.block.block_start_count = ++block_start_count;
3004 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3005 block_stack = thisblock;
3006 nesting_stack = thisblock;
3008 if (!output_bytecode)
3010 /* Make a new level for allocating stack slots. */
3011 push_temp_slots ();
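/* Illustrative sketch only (not from the sources): for a block
   `{ int x = 1; ... }' a front end calls

     expand_start_bindings (0);
     expand_decl (x_decl);
     expand_decl_init (x_decl);
     ... expand the body ...
     expand_end_bindings (decl_chain, 1, 0);

   where `x_decl' and `decl_chain' stand for the VAR_DECL and the
   chain of decls the front end built.  */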
3015 /* Given a pointer to a BLOCK node, save a pointer to the most recently
3016 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3017 BLOCK node. */
3019 void
3020 remember_end_note (block)
3021 register tree block;
3023 BLOCK_END_NOTE (block) = last_block_end_note;
3024 last_block_end_note = NULL_RTX;
3027 /* Generate RTL code to terminate a binding contour.
3028 VARS is the chain of VAR_DECL nodes
3029 for the variables bound in this contour.
3030 MARK_ENDS is nonzero if we should put a note at the beginning
3031 and end of this binding contour.
3033 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3034 (That is true automatically if the contour has a saved stack level.) */
3036 void
3037 expand_end_bindings (vars, mark_ends, dont_jump_in)
3038 tree vars;
3039 int mark_ends;
3040 int dont_jump_in;
3042 register struct nesting *thisblock = block_stack;
3043 register tree decl;
3045 if (output_bytecode)
3047 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
3048 return;
3051 if (warn_unused)
3052 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3053 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3054 && ! DECL_IN_SYSTEM_HEADER (decl))
3055 warning_with_decl (decl, "unused variable `%s'");
3057 if (thisblock->exit_label)
3059 do_pending_stack_adjust ();
3060 emit_label (thisblock->exit_label);
3063 /* If necessary, make a handler for nonlocal gotos taking
3064 place in the function calls in this block. */
3065 if (function_call_count != thisblock->data.block.function_call_count
3066 && nonlocal_labels
3067 /* Make handler for outermost block
3068 if there were any nonlocal gotos to this function. */
3069 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3070 /* Make handler for inner block if it has something
3071 special to do when you jump out of it. */
3072 : (thisblock->data.block.cleanups != 0
3073 || thisblock->data.block.stack_level != 0)))
3075 tree link;
3076 rtx afterward = gen_label_rtx ();
3077 rtx handler_label = gen_label_rtx ();
3078 rtx save_receiver = gen_reg_rtx (Pmode);
3079 rtx insns;
3081 /* Don't let jump_optimize delete the handler. */
3082 LABEL_PRESERVE_P (handler_label) = 1;
3084 /* Record the handler address in the stack slot for that purpose,
3085 during this block, saving and restoring the outer value. */
3086 if (thisblock->next != 0)
3088 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
3090 start_sequence ();
3091 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
3092 insns = get_insns ();
3093 end_sequence ();
3094 emit_insns_before (insns, thisblock->data.block.first_insn);
3097 start_sequence ();
3098 emit_move_insn (nonlocal_goto_handler_slot,
3099 gen_rtx (LABEL_REF, Pmode, handler_label));
3100 insns = get_insns ();
3101 end_sequence ();
3102 emit_insns_before (insns, thisblock->data.block.first_insn);
3104 /* Jump around the handler; it runs only when specially invoked. */
3105 emit_jump (afterward);
3106 emit_label (handler_label);
3108 #ifdef HAVE_nonlocal_goto
3109 if (! HAVE_nonlocal_goto)
3110 #endif
3111 /* First adjust our frame pointer to its actual value. It was
3112 previously set to the start of the virtual area corresponding to
3113 the stacked variables when we branched here and now needs to be
3114 adjusted to the actual hardware fp value.
3116 Assignments to virtual registers are converted by
3117 instantiate_virtual_regs into the corresponding assignment
3118 to the underlying register (fp in this case) that makes
3119 the original assignment true.
3120 So the following insn will actually be
3121 decrementing fp by STARTING_FRAME_OFFSET. */
3122 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3124 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3125 if (fixed_regs[ARG_POINTER_REGNUM])
3127 #ifdef ELIMINABLE_REGS
3128 /* If the argument pointer can be eliminated in favor of the
3129 frame pointer, we don't need to restore it. We assume here
3130 that if such an elimination is present, it can always be used.
3131 This is the case on all known machines; if we don't make this
3132 assumption, we do unnecessary saving on many machines. */
3133 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3134 int i;
3136 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3137 if (elim_regs[i].from == ARG_POINTER_REGNUM
3138 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3139 break;
3141 if (i == sizeof elim_regs / sizeof elim_regs [0])
3142 #endif
3144 /* Now restore our arg pointer from the address at which it
3145 was saved in our stack frame.
3146 If there hasn't been space allocated for it yet, make
3147 some now. */
3148 if (arg_pointer_save_area == 0)
3149 arg_pointer_save_area
3150 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3151 emit_move_insn (virtual_incoming_args_rtx,
3152 /* We need a pseudo here, or else
3153 instantiate_virtual_regs_1 complains. */
3154 copy_to_reg (arg_pointer_save_area));
3157 #endif
3159 /* The handler expects the desired label address in the static chain
3160 register. It tests the address and does an appropriate jump
3161 to whatever label is desired. */
3162 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
3163 /* Skip any labels we shouldn't be able to jump to from here. */
3164 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3166 rtx not_this = gen_label_rtx ();
3167 rtx this = gen_label_rtx ();
3168 do_jump_if_equal (static_chain_rtx,
3169 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
3170 this, 0);
3171 emit_jump (not_this);
3172 emit_label (this);
3173 expand_goto (TREE_VALUE (link));
3174 emit_label (not_this);
3176 /* If label is not recognized, abort. */
3177 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
3178 VOIDmode, 0);
3179 emit_barrier ();
3180 emit_label (afterward);
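/* Illustrative example only (not from the sources): the handler above
   serves GNU C code such as

     void f () { __label__ out; void g () { goto out; } bar (g); out:; }

   When g executes `goto out', control reenters f at handler_label
   with out's address in the static chain register, and the comparison
   chain above dispatches to the right label.  */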
3183 /* Don't allow jumping into a block that has cleanups or a stack level. */
3184 if (dont_jump_in
3185 || thisblock->data.block.stack_level != 0
3186 || thisblock->data.block.cleanups != 0)
3188 struct label_chain *chain;
3190 /* Any labels in this block are no longer valid to go to.
3191 Mark them to cause an error message. */
3192 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3194 DECL_TOO_LATE (chain->label) = 1;
3195 /* If any goto without a fixup came to this label,
3196 that must be an error, because gotos without fixups
3197 come from outside all saved stack-levels and all cleanups. */
3198 if (TREE_ADDRESSABLE (chain->label))
3199 error_with_decl (chain->label,
3200 "label `%s' used before containing binding contour");
3204 /* Restore stack level in effect before the block
3205 (only if variable-size objects allocated). */
3206 /* Perform any cleanups associated with the block. */
3208 if (thisblock->data.block.stack_level != 0
3209 || thisblock->data.block.cleanups != 0)
3211 /* Only clean up here if this point can actually be reached. */
3212 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3214 /* Don't let cleanups affect ({...}) constructs. */
3215 int old_expr_stmts_for_value = expr_stmts_for_value;
3216 rtx old_last_expr_value = last_expr_value;
3217 tree old_last_expr_type = last_expr_type;
3218 expr_stmts_for_value = 0;
3220 /* Do the cleanups. */
3221 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3222 if (reachable)
3223 do_pending_stack_adjust ();
3225 expr_stmts_for_value = old_expr_stmts_for_value;
3226 last_expr_value = old_last_expr_value;
3227 last_expr_type = old_last_expr_type;
3229 /* Restore the stack level. */
3231 if (reachable && thisblock->data.block.stack_level != 0)
3233 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3234 thisblock->data.block.stack_level, NULL_RTX);
3235 if (nonlocal_goto_handler_slot != 0)
3236 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3237 NULL_RTX);
3240 /* Any gotos out of this block must also do these things.
3241 Also report any gotos with fixups that came to labels in this
3242 level. */
3243 fixup_gotos (thisblock,
3244 thisblock->data.block.stack_level,
3245 thisblock->data.block.cleanups,
3246 thisblock->data.block.first_insn,
3247 dont_jump_in);
3250 /* Mark the beginning and end of the scope if requested.
3251 We do this now, after running cleanups on the variables
3252 just going out of scope, so they are in scope for their cleanups. */
3254 if (mark_ends)
3255 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3256 else
3257 /* Get rid of the beginning-mark if we don't make an end-mark. */
3258 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3260 /* If doing stupid register allocation, make sure lives of all
3261 register variables declared here extend thru end of scope. */
3263 if (obey_regdecls)
3264 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3266 rtx rtl = DECL_RTL (decl);
3267 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3268 use_variable (rtl);
3271 /* Restore block_stack level for containing block. */
3273 stack_block_stack = thisblock->data.block.innermost_stack_block;
3274 POPSTACK (block_stack);
3276 /* Pop the stack slot nesting and free any slots at this level. */
3277 pop_temp_slots ();
3281 /* End a binding contour.
3282 VARS is the chain of VAR_DECL nodes for the variables bound
3283 in this contour.  MARK_ENDS is nonzero if we should put a note
3284 at the beginning and end of this binding contour.
3285 DONT_JUMP_IN is nonzero if it is not valid to jump into this
3286 contour. */
3288 static void
3289 bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3290 tree vars;
3291 int mark_ends;
3292 int dont_jump_in;
3294 struct nesting *thisbind = nesting_stack;
3295 tree decl;
3297 if (warn_unused)
3298 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3299 if (! TREE_USED (TREE_VALUE (decl)) && TREE_CODE (TREE_VALUE (decl)) == VAR_DECL)
3300 warning_with_decl (decl, "unused variable `%s'");
3302 if (thisbind->exit_label)
3303 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));
3305 /* Pop block/bindings off stack */
3306 POPSTACK (block_stack);
3309 /* Generate RTL for the automatic variable declaration DECL.
3310 (Other kinds of declarations are simply ignored if seen here.) */
3312 void
3313 expand_decl (decl)
3314 register tree decl;
3316 struct nesting *thisblock = block_stack;
3317 tree type;
3319 if (output_bytecode)
3321 bc_expand_decl (decl, 0);
3322 return;
3325 type = TREE_TYPE (decl);
3327 /* Only automatic variables need any expansion done.
3328 Static and external variables, and external functions,
3329 will be handled by `assemble_variable' (called from finish_decl).
3330 TYPE_DECL and CONST_DECL require nothing.
3331 PARM_DECLs are handled in `assign_parms'. */
3333 if (TREE_CODE (decl) != VAR_DECL)
3334 return;
3335 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3336 return;
3338 /* Create the RTL representation for the variable. */
3340 if (type == error_mark_node)
3341 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3342 else if (DECL_SIZE (decl) == 0)
3343 /* Variable with incomplete type. */
3345 if (DECL_INITIAL (decl) == 0)
3346 /* Error message was already done; now avoid a crash. */
3347 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3348 else
3349 /* An initializer is going to decide the size of this array.
3350 Until we know the size, represent its address with a reg. */
3351 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3352 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
3354 else if (DECL_MODE (decl) != BLKmode
3355 /* If -ffloat-store, don't put explicit float vars
3356 into regs. */
3357 && !(flag_float_store
3358 && TREE_CODE (type) == REAL_TYPE)
3359 && ! TREE_THIS_VOLATILE (decl)
3360 && ! TREE_ADDRESSABLE (decl)
3361 && (DECL_REGISTER (decl) || ! obey_regdecls))
3363 /* Automatic variable that can go in a register. */
3364 int unsignedp = TREE_UNSIGNED (type);
3365 enum machine_mode reg_mode
3366 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3368 if (TREE_CODE (type) == COMPLEX_TYPE)
3370 rtx realpart, imagpart;
3371 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (type));
3373 /* For a complex type variable, make a CONCAT of two pseudos
3374 so that the real and imaginary parts
3375 can be allocated separately. */
3376 realpart = gen_reg_rtx (partmode);
3377 REG_USERVAR_P (realpart) = 1;
3378 imagpart = gen_reg_rtx (partmode);
3379 REG_USERVAR_P (imagpart) = 1;
3380 DECL_RTL (decl) = gen_rtx (CONCAT, reg_mode, realpart, imagpart);
3382 else
3384 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3385 if (TREE_CODE (type) == POINTER_TYPE)
3386 mark_reg_pointer (DECL_RTL (decl),
3387 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3388 / BITS_PER_UNIT));
3389 REG_USERVAR_P (DECL_RTL (decl)) = 1;
3392 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
3394 /* Variable of fixed size that goes on the stack. */
3395 rtx oldaddr = 0;
3396 rtx addr;
3398 /* If we previously made RTL for this decl, it must be an array
3399 whose size was determined by the initializer.
3400 The old address was a register; set that register now
3401 to the proper address. */
3402 if (DECL_RTL (decl) != 0)
3404 if (GET_CODE (DECL_RTL (decl)) != MEM
3405 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3406 abort ();
3407 oldaddr = XEXP (DECL_RTL (decl), 0);
3410 DECL_RTL (decl)
3411 = assign_stack_temp (DECL_MODE (decl),
3412 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3413 + BITS_PER_UNIT - 1)
3414 / BITS_PER_UNIT),
3415 1);
3416 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3418 /* Set alignment we actually gave this decl. */
3419 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3420 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3422 if (oldaddr)
3424 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3425 if (addr != oldaddr)
3426 emit_move_insn (oldaddr, addr);
3429 /* If this is a memory ref that contains aggregate components,
3430 mark it as such for cse and loop optimize. */
3431 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3432 #if 0
3433 /* If this is in memory because of -ffloat-store,
3434 set the volatile bit, to prevent optimizations from
3435 undoing the effects. */
3436 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3437 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3438 #endif
3440 else
3441 /* Dynamic-size object: must push space on the stack. */
3443 rtx address, size;
3445 /* Record the stack pointer on entry to block, if have
3446 not already done so. */
3447 if (thisblock->data.block.stack_level == 0)
3449 do_pending_stack_adjust ();
3450 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3451 &thisblock->data.block.stack_level,
3452 thisblock->data.block.first_insn);
3453 stack_block_stack = thisblock;
3456 /* Compute the variable's size, in bytes. */
3457 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3458 DECL_SIZE (decl),
3459 size_int (BITS_PER_UNIT)),
3460 NULL_RTX, VOIDmode, 0);
3461 free_temp_slots ();
3463 /* Allocate space on the stack for the variable. */
3464 address = allocate_dynamic_stack_space (size, NULL_RTX,
3465 DECL_ALIGN (decl));
3467 /* Reference the variable indirect through that rtx. */
3468 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
3470 /* If this is a memory ref that contains aggregate components,
3471 mark it as such for cse and loop optimize. */
3472 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3474 /* Indicate the alignment we actually gave this variable. */
3475 #ifdef STACK_BOUNDARY
3476 DECL_ALIGN (decl) = STACK_BOUNDARY;
3477 #else
3478 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3479 #endif
3482 if (TREE_THIS_VOLATILE (decl))
3483 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3484 #if 0 /* A variable is not necessarily unchanging
3485 just because it is const. RTX_UNCHANGING_P
3486 means no change in the function,
3487 not merely no change in the variable's scope.
3488 It is correct to set RTX_UNCHANGING_P if the variable's scope
3489 is the whole function. There's no convenient way to test that. */
3490 if (TREE_READONLY (decl))
3491 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3492 #endif
3494 /* If doing stupid register allocation, make sure life of any
3495 register variable starts here, at the start of its scope. */
3497 if (obey_regdecls)
3498 use_variable (DECL_RTL (decl));
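/* Illustrative summary only (not from the sources) of what
   expand_decl produces:

     register int i;         pseudo register, possibly promoted
     __complex__ double z;   CONCAT of two pseudo registers
     char buf[10];           stack slot from assign_stack_temp
     char vla[n];            MEM through an address returned by
                             allocate_dynamic_stack_space  */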
3502 /* Generate code for the automatic variable declaration DECL. For
3503 most variables this just means we give it a stack offset. The
3504 compiler sometimes emits cleanups without variables and we will
3505 have to deal with those too. */
3507 static void
3508 bc_expand_decl (decl, cleanup)
3509 tree decl;
3510 tree cleanup;
3512 tree type;
3514 if (!decl)
3516 /* A cleanup with no variable. */
3517 if (!cleanup)
3518 abort ();
3520 return;
3523 /* Only auto variables need any work. */
3524 if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3525 return;
3527 type = TREE_TYPE (decl);
3529 if (type == error_mark_node)
3530 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3532 else if (DECL_SIZE (decl) == 0)
3534 /* Variable with incomplete type. The stack offset herein will be
3535 fixed later in expand_decl_init (). */
3536 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3538 else if (TREE_CONSTANT (DECL_SIZE (decl)))
3540 DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
3541 DECL_ALIGN (decl));
3543 else
3544 DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
3547 /* Emit code to perform the initialization of a declaration DECL. */
3549 void
3550 expand_decl_init (decl)
3551 tree decl;
3553 int was_used = TREE_USED (decl);
3555 if (output_bytecode)
3557 bc_expand_decl_init (decl);
3558 return;
3561 /* If this is a CONST_DECL, we don't have to generate any code, but
3562 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3563 to be set while in the obstack containing the constant. If we don't
3564 do this, we can lose if we have functions nested three deep and the middle
3565 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3566 the innermost function is the first to expand that STRING_CST. */
3567 if (TREE_CODE (decl) == CONST_DECL)
3569 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3570 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3571 EXPAND_INITIALIZER);
3572 return;
3575 if (TREE_STATIC (decl))
3576 return;
3578 /* Compute and store the initial value now. */
3580 if (DECL_INITIAL (decl) == error_mark_node)
3582 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3583 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3584 || code == POINTER_TYPE)
3585 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3586 0, 0);
3587 emit_queue ();
3589 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3591 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3592 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3593 emit_queue ();
3596 /* Don't let the initialization count as "using" the variable. */
3597 TREE_USED (decl) = was_used;
3599 /* Free any temporaries we made while initializing the decl. */
3600 preserve_temp_slots (NULL_RTX);
3601 free_temp_slots ();
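/* Illustrative example (assumed front-end input, not from this
   file): for a declaration such as `int x = y + 1;',
   expand_decl_init emits a line note for the declaration's source
   position and expands the assignment `x = y + 1', while an
   error_mark_node initializer of scalar type is replaced by a store
   of zero so later code sees a defined value.  */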
3604 /* Expand initialization for variable-sized types. Allocate array
3605 using newlocalSI and set local variable, which is a pointer to the
3606 storage. */
3608 static void
3609 bc_expand_variable_local_init (decl)
3610 tree decl;
3612 /* Evaluate size expression and coerce to SI */
3613 bc_expand_expr (DECL_SIZE (decl));
3615 /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
3616 no coercion is necessary (?) */
3618 /* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
3619 TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */
3621 /* Emit code to allocate array */
3622 bc_emit_instruction (newlocalSI);
3624 /* Store array pointer in local variable. This is the only instance
3625 where we actually want the address of the pointer to the
3626 variable-size block, rather than the pointer itself. We avoid
3627 using expand_address() since that would cause the pointer to be
3628 pushed rather than its address. Hence the hard-coded reference;
3629 notice also that the variable is always local (no global
3630 variable-size type variables). */
3632 bc_load_localaddr (DECL_RTL (decl));
3633 bc_emit_instruction (storeP);
3637 /* Emit code to initialize a declaration. */
3639 static void
3640 bc_expand_decl_init (decl)
3641 tree decl;
3643 int org_stack_depth;
3645 /* Static initializers are handled elsewhere. */
3647 if (TREE_STATIC (decl))
3648 return;
3650 /* Remember the original stack depth. */
3651 org_stack_depth = stack_depth;
3653 /* If the type is variable-size, we first create its space (we ASSUME
3654 it CAN'T be static). We do this regardless of whether there's an
3655 initializer assignment or not. */
3657 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
3658 bc_expand_variable_local_init (decl);
3660 /* Expand initializer assignment */
3661 if (DECL_INITIAL (decl) == error_mark_node)
3663 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3665 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3666 || code == POINTER_TYPE)
3668 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3670 else if (DECL_INITIAL (decl))
3671 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3673 /* Restore stack depth */
3674 if (org_stack_depth > stack_depth)
3675 abort ();
3677 bc_adjust_stack (stack_depth - org_stack_depth);
3681 /* CLEANUP is an expression to be executed at exit from this binding contour;
3682 for example, in C++, it might call the destructor for this variable.
3684 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3685 either before or after calling `expand_decl_cleanup' but before compiling
3686 any subsequent expressions. This is because CLEANUP may be expanded
3687 more than once, on different branches of execution.
3688 For the same reason, CLEANUP may not contain a CALL_EXPR
3689 except as its topmost node--else `preexpand_calls' would get confused.
3691 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3692 that is not associated with any particular variable. */
3695 expand_decl_cleanup (decl, cleanup)
3696 tree decl, cleanup;
3698 struct nesting *thisblock = block_stack;
3700 /* Error if we are not in any block. */
3701 if (thisblock == 0)
3702 return 0;
3704 /* Record the cleanup if there is one. */
3706 if (cleanup != 0)
3708 thisblock->data.block.cleanups
3709 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3710 /* If this block has a cleanup, it belongs in stack_block_stack. */
3711 stack_block_stack = thisblock;
3712 (*interim_eh_hook) (NULL_TREE);
3714 return 1;
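#if 0
/* Illustrative sketch of a caller (hypothetical, not from this
   file): a C++ front end expanding a local object with a destructor
   might register its cleanup like this, where build_destructor_call
   is an assumed front-end helper.  Any SAVE_EXPRs in the cleanup
   must be preevaluated first, as documented above.  */
expand_decl (decl);
expand_decl_init (decl);
expand_decl_cleanup (decl, build_destructor_call (decl));
#endif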
3717 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3718 DECL_ELTS is the list of elements that belong to DECL's type.
3719 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3721 void
3722 expand_anon_union_decl (decl, cleanup, decl_elts)
3723 tree decl, cleanup, decl_elts;
3725 struct nesting *thisblock = block_stack;
3726 rtx x;
3728 expand_decl (decl);
3729 expand_decl_cleanup (decl, cleanup);
3730 x = DECL_RTL (decl);
3732 while (decl_elts)
3734 tree decl_elt = TREE_VALUE (decl_elts);
3735 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3736 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3738 /* Propagate the union's alignment to the elements. */
3739 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3741 /* If the element has BLKmode and the union doesn't, the union is
3742 aligned such that the element doesn't need to have BLKmode, so
3743 change the element's mode to the appropriate one for its size. */
3744 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3745 DECL_MODE (decl_elt) = mode
3746 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3747 MODE_INT, 1);
3749 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3750 instead create a new MEM rtx with the proper mode. */
3751 if (GET_CODE (x) == MEM)
3753 if (mode == GET_MODE (x))
3754 DECL_RTL (decl_elt) = x;
3755 else
3757 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
3758 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3759 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3762 else if (GET_CODE (x) == REG)
3764 if (mode == GET_MODE (x))
3765 DECL_RTL (decl_elt) = x;
3766 else
3767 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
3769 else
3770 abort ();
3772 /* Record the cleanup if there is one. */
3774 if (cleanup != 0)
3775 thisblock->data.block.cleanups
3776 = temp_tree_cons (decl_elt, cleanup_elt,
3777 thisblock->data.block.cleanups);
3779 decl_elts = TREE_CHAIN (decl_elts);
3783 /* Expand a list of cleanups LIST.
3784 Elements may be expressions or may be nested lists.
3786 If DONT_DO is nonnull, then any list-element
3787 whose TREE_PURPOSE matches DONT_DO is omitted.
3788 This is sometimes used to avoid a cleanup associated with
3789 a value that is being returned out of the scope.
3791 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
3792 goto and handle protection regions specially in that case.
3794 If REACHABLE, we emit code, otherwise just inform the exception handling
3795 code about this finalization. */
3797 static void
3798 expand_cleanups (list, dont_do, in_fixup, reachable)
3799 tree list;
3800 tree dont_do;
3801 int in_fixup;
3802 int reachable;
3804 tree tail;
3805 for (tail = list; tail; tail = TREE_CHAIN (tail))
3806 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3808 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3809 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
3810 else
3812 if (! in_fixup)
3813 (*interim_eh_hook) (TREE_VALUE (tail));
3815 if (reachable)
3817 /* Cleanups may be run multiple times. For example,
3818 when exiting a binding contour, we expand the
3819 cleanups associated with that contour. When a goto
3820 within that binding contour has a target outside that
3821 contour, it will expand all cleanups from its scope to
3822 the target. Though the cleanups are expanded multiple
3823 times, the control paths are non-overlapping so the
3824 cleanups will not be executed twice. */
3825 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3826 free_temp_slots ();
3832 /* Move all cleanups from the current block_stack
3833 to the containing block_stack, where they are assumed to
3834 have been created. If anything can cause a temporary to
3835 be created, but not expanded for more than one level of
3836 block_stacks, then this code will have to change. */
3838 void
3839 move_cleanups_up ()
3841 struct nesting *block = block_stack;
3842 struct nesting *outer = block->next;
3844 outer->data.block.cleanups
3845 = chainon (block->data.block.cleanups,
3846 outer->data.block.cleanups);
3847 block->data.block.cleanups = 0;
3850 tree
3851 last_cleanup_this_contour ()
3853 if (block_stack == 0)
3854 return 0;
3856 return block_stack->data.block.cleanups;
3859 /* Return 1 if there are any pending cleanups at this point.
3860 If THIS_CONTOUR is nonzero, check the current contour as well.
3861 Otherwise, look only at the contours that enclose this one. */
3864 any_pending_cleanups (this_contour)
3865 int this_contour;
3867 struct nesting *block;
3869 if (block_stack == 0)
3870 return 0;
3872 if (this_contour && block_stack->data.block.cleanups != NULL)
3873 return 1;
3874 if (block_stack->data.block.cleanups == 0
3875 && (block_stack->data.block.outer_cleanups == 0
3876 #if 0
3877 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3878 #endif
3880 return 0;
3882 for (block = block_stack->next; block; block = block->next)
3883 if (block->data.block.cleanups != 0)
3884 return 1;
3886 return 0;
3889 /* Enter a case (Pascal) or switch (C) statement.
3890 Push a block onto case_stack and nesting_stack
3891 to accumulate the case-labels that are seen
3892 and to record the labels generated for the statement.
3894 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3895 Otherwise, this construct is transparent for `exit_something'.
3897 EXPR is the index-expression to be dispatched on.
3898 TYPE is its nominal type. We could simply convert EXPR to this type,
3899 but instead we take short cuts. */
3901 void
3902 expand_start_case (exit_flag, expr, type, printname)
3903 int exit_flag;
3904 tree expr;
3905 tree type;
3906 char *printname;
3908 register struct nesting *thiscase = ALLOC_NESTING ();
3910 /* Make an entry on case_stack for the case we are entering. */
3912 thiscase->next = case_stack;
3913 thiscase->all = nesting_stack;
3914 thiscase->depth = ++nesting_depth;
3915 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3916 thiscase->data.case_stmt.case_list = 0;
3917 thiscase->data.case_stmt.index_expr = expr;
3918 thiscase->data.case_stmt.nominal_type = type;
3919 thiscase->data.case_stmt.default_label = 0;
3920 thiscase->data.case_stmt.num_ranges = 0;
3921 thiscase->data.case_stmt.printname = printname;
3922 thiscase->data.case_stmt.seenlabel = 0;
3923 case_stack = thiscase;
3924 nesting_stack = thiscase;
3926 if (output_bytecode)
3928 bc_expand_start_case (thiscase, expr, type, printname);
3929 return;
3932 do_pending_stack_adjust ();
3934 /* Make sure case_stmt.start points to something that won't
3935 need any transformation before expand_end_case. */
3936 if (GET_CODE (get_last_insn ()) != NOTE)
3937 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3939 thiscase->data.case_stmt.start = get_last_insn ();
3943 /* Enter a case statement. It is assumed that the caller has pushed
3944 the current context onto the case stack. */
3946 static void
3947 bc_expand_start_case (thiscase, expr, type, printname)
3948 struct nesting *thiscase;
3949 tree expr;
3950 tree type;
3951 char *printname;
3953 bc_expand_expr (expr);
3954 bc_expand_conversion (TREE_TYPE (expr), type);
3956 /* For cases, the skip is a place we jump to that's emitted after
3957 the size of the jump table is known. */
3959 thiscase->data.case_stmt.skip_label = gen_label_rtx ();
3960 bc_emit_bytecode (jump);
3961 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
3963 #ifdef DEBUG_PRINT_CODE
3964 fputc ('\n', stderr);
3965 #endif
3969 /* Start a "dummy case statement" within which case labels are invalid
3970 and are not connected to any larger real case statement.
3971 This can be used if you don't want to let a case statement jump
3972 into the middle of certain kinds of constructs. */
3974 void
3975 expand_start_case_dummy ()
3977 register struct nesting *thiscase = ALLOC_NESTING ();
3979 /* Make an entry on case_stack for the dummy. */
3981 thiscase->next = case_stack;
3982 thiscase->all = nesting_stack;
3983 thiscase->depth = ++nesting_depth;
3984 thiscase->exit_label = 0;
3985 thiscase->data.case_stmt.case_list = 0;
3986 thiscase->data.case_stmt.start = 0;
3987 thiscase->data.case_stmt.nominal_type = 0;
3988 thiscase->data.case_stmt.default_label = 0;
3989 thiscase->data.case_stmt.num_ranges = 0;
3990 case_stack = thiscase;
3991 nesting_stack = thiscase;
3994 /* End a dummy case statement. */
3996 void
3997 expand_end_case_dummy ()
3999 POPSTACK (case_stack);
4002 /* Return the data type of the index-expression
4003 of the innermost case statement, or null if none. */
4005 tree
4006 case_index_expr_type ()
4008 if (case_stack)
4009 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4010 return 0;
4013 /* Accumulate one case or default label inside a case or switch statement.
4014 VALUE is the value of the case (a null pointer, for a default label).
4015 The function CONVERTER, when applied to arguments T and V,
4016 converts the value V to the type T.
4018 If not currently inside a case or switch statement, return 1 and do
4019 nothing. The caller will print a language-specific error message.
4020 If VALUE is a duplicate or overlaps, return 2 and do nothing
4021 except store the (first) duplicate node in *DUPLICATE.
4022 If VALUE is out of range, return 3 and do nothing.
4023 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4024 Return 0 on success.
4026 Extended to handle range statements. */
4029 pushcase (value, converter, label, duplicate)
4030 register tree value;
4031 tree (*converter) PROTO((tree, tree));
4032 register tree label;
4033 tree *duplicate;
4035 register struct case_node **l;
4036 register struct case_node *n;
4037 tree index_type;
4038 tree nominal_type;
4040 if (output_bytecode)
4041 return bc_pushcase (value, label);
4043 /* Fail if not inside a real case statement. */
4044 if (! (case_stack && case_stack->data.case_stmt.start))
4045 return 1;
4047 if (stack_block_stack
4048 && stack_block_stack->depth > case_stack->depth)
4049 return 5;
4051 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4052 nominal_type = case_stack->data.case_stmt.nominal_type;
4054 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4055 if (index_type == error_mark_node)
4056 return 0;
4058 /* Convert VALUE to the type in which the comparisons are nominally done. */
4059 if (value != 0)
4060 value = (*converter) (nominal_type, value);
4062 /* If this is the first label, warn if any insns have been emitted. */
4063 if (case_stack->data.case_stmt.seenlabel == 0)
4065 rtx insn;
4066 for (insn = case_stack->data.case_stmt.start;
4067 insn;
4068 insn = NEXT_INSN (insn))
4070 if (GET_CODE (insn) == CODE_LABEL)
4071 break;
4072 if (GET_CODE (insn) != NOTE
4073 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4075 warning ("unreachable code at beginning of %s",
4076 case_stack->data.case_stmt.printname);
4077 break;
4081 case_stack->data.case_stmt.seenlabel = 1;
4083 /* Fail if this value is out of range for the actual type of the index
4084 (which may be narrower than NOMINAL_TYPE). */
4085 if (value != 0 && ! int_fits_type_p (value, index_type))
4086 return 3;
4088 /* Fail if this is a duplicate or overlaps another entry. */
4089 if (value == 0)
4091 if (case_stack->data.case_stmt.default_label != 0)
4093 *duplicate = case_stack->data.case_stmt.default_label;
4094 return 2;
4096 case_stack->data.case_stmt.default_label = label;
4098 else
4100 /* Find the elt in the chain before which to insert the new value,
4101 to keep the chain sorted in increasing order.
4102 But report an error if this element is a duplicate. */
4103 for (l = &case_stack->data.case_stmt.case_list;
4104 /* Keep going past elements distinctly less than VALUE. */
4105 *l != 0 && tree_int_cst_lt ((*l)->high, value);
4106 l = &(*l)->right)
4108 if (*l)
4110 /* Element we will insert before must be distinctly greater;
4111 overlap means error. */
4112 if (! tree_int_cst_lt (value, (*l)->low))
4114 *duplicate = (*l)->code_label;
4115 return 2;
4119 /* Add this label to the chain, and succeed.
4120 Copy VALUE so it is on temporary rather than momentary
4121 obstack and will thus survive till the end of the case statement. */
4122 n = (struct case_node *) oballoc (sizeof (struct case_node));
4123 n->left = 0;
4124 n->right = *l;
4125 n->high = n->low = copy_node (value);
4126 n->code_label = label;
4127 *l = n;
4130 expand_label (label);
4131 return 0;
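#if 0
/* Illustrative sketch of a caller (hypothetical): expanding
   `case 3:' with label LABEL in the innermost switch.  `convert'
   is the language's tree conversion routine, passed as CONVERTER.  */
tree duplicate;
int failure = pushcase (build_int_2 (3, 0), convert, LABEL, &duplicate);
/* failure is 0 on success; 1 if not inside a switch; 2 for a
   duplicate (stored in `duplicate'); 3 if out of range; 5 for a
   jump into the scope of a cleanup or variable-sized array.  */
#endif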
4134 /* Like pushcase but this case applies to all values
4135 between VALUE1 and VALUE2 (inclusive).
4136 The return value is the same as that of pushcase
4137 but there is one additional error code:
4138 4 means the specified range was empty. */
4141 pushcase_range (value1, value2, converter, label, duplicate)
4142 register tree value1, value2;
4143 tree (*converter) PROTO((tree, tree));
4144 register tree label;
4145 tree *duplicate;
4147 register struct case_node **l;
4148 register struct case_node *n;
4149 tree index_type;
4150 tree nominal_type;
4152 /* Fail if not inside a real case statement. */
4153 if (! (case_stack && case_stack->data.case_stmt.start))
4154 return 1;
4156 if (stack_block_stack
4157 && stack_block_stack->depth > case_stack->depth)
4158 return 5;
4160 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4161 nominal_type = case_stack->data.case_stmt.nominal_type;
4163 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4164 if (index_type == error_mark_node)
4165 return 0;
4167 /* If this is the first label, warn if any insns have been emitted. */
4168 if (case_stack->data.case_stmt.seenlabel == 0)
4170 rtx insn;
4171 for (insn = case_stack->data.case_stmt.start;
4172 insn;
4173 insn = NEXT_INSN (insn))
4175 if (GET_CODE (insn) == CODE_LABEL)
4176 break;
4177 if (GET_CODE (insn) != NOTE
4178 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4180 warning ("unreachable code at beginning of %s",
4181 case_stack->data.case_stmt.printname);
4182 break;
4186 case_stack->data.case_stmt.seenlabel = 1;
4188 /* Convert VALUEs to type in which the comparisons are nominally done. */
4189 if (value1 == 0) /* Negative infinity. */
4190 value1 = TYPE_MIN_VALUE (index_type);
4191 value1 = (*converter) (nominal_type, value1);
4193 if (value2 == 0) /* Positive infinity. */
4194 value2 = TYPE_MAX_VALUE (index_type);
4195 value2 = (*converter) (nominal_type, value2);
4197 /* Fail if these values are out of range. */
4198 if (! int_fits_type_p (value1, index_type))
4199 return 3;
4201 if (! int_fits_type_p (value2, index_type))
4202 return 3;
4204 /* Fail if the range is empty. */
4205 if (tree_int_cst_lt (value2, value1))
4206 return 4;
4208 /* If the bounds are equal, turn this into the one-value case. */
4209 if (tree_int_cst_equal (value1, value2))
4210 return pushcase (value1, converter, label, duplicate);
4212 /* Find the elt in the chain before which to insert the new value,
4213 to keep the chain sorted in increasing order.
4214 But report an error if this element is a duplicate. */
4215 for (l = &case_stack->data.case_stmt.case_list;
4216 /* Keep going past elements distinctly less than this range. */
4217 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
4218 l = &(*l)->right)
4220 if (*l)
4222 /* Element we will insert before must be distinctly greater;
4223 overlap means error. */
4224 if (! tree_int_cst_lt (value2, (*l)->low))
4226 *duplicate = (*l)->code_label;
4227 return 2;
4231 /* Add this label to the chain, and succeed.
4232 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
4233 obstack and will thus survive till the end of the case statement. */
4235 n = (struct case_node *) oballoc (sizeof (struct case_node));
4236 n->left = 0;
4237 n->right = *l;
4238 n->low = copy_node (value1);
4239 n->high = copy_node (value2);
4240 n->code_label = label;
4241 *l = n;
4243 expand_label (label);
4245 case_stack->data.case_stmt.num_ranges++;
4247 return 0;
4251 /* Accumulate one case or default label; VALUE is the value of the
4252 case, or nil for a default label. If not currently inside a case,
4253 return 1 and do nothing. If VALUE is a duplicate or overlaps, return
4254 2 and do nothing. If VALUE is out of range, return 3 and do nothing.
4255 Return 0 on success. This function is a leftover from the earlier
4256 bytecode compiler, which was based on gcc 1.37. It should be
4257 merged into pushcase. */
4259 static int
4260 bc_pushcase (value, label)
4261 tree value;
4262 tree label;
4264 struct nesting *thiscase = case_stack;
4265 struct case_node *case_label, *new_label;
4267 if (! thiscase)
4268 return 1;
4270 /* Fail if duplicate, overlap, or out of type range. */
4271 if (value)
4273 value = convert (thiscase->data.case_stmt.nominal_type, value);
4274 if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
4275 return 3;
4277 for (case_label = thiscase->data.case_stmt.case_list;
4278 case_label->left; case_label = case_label->left)
4279 if (! tree_int_cst_lt (case_label->left->high, value))
4280 break;
4282 if ((case_label != thiscase->data.case_stmt.case_list
4283 && ! tree_int_cst_lt (case_label->high, value))
4284 || (case_label->left && ! tree_int_cst_lt (value, case_label->left->low)))
4285 return 2;
4287 new_label = (struct case_node *) oballoc (sizeof (struct case_node));
4288 new_label->low = new_label->high = copy_node (value);
4289 new_label->code_label = label;
4290 new_label->left = case_label->left;
4292 case_label->left = new_label;
4293 thiscase->data.case_stmt.num_ranges++;
4295 else
4297 if (thiscase->data.case_stmt.default_label)
4298 return 2;
4299 thiscase->data.case_stmt.default_label = label;
4302 expand_label (label);
4303 return 0;
4306 /* Returns the number of possible values of TYPE.
4307 Returns -1 if the number is unknown or variable.
4308 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4309 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4310 do not increase monotonically (there may be duplicates);
4311 to 1 if the values increase monotonically, but not always by 1;
4312 otherwise sets it to 0. */
4314 HOST_WIDE_INT
4315 all_cases_count (type, spareness)
4316 tree type;
4317 int *spareness;
4319 HOST_WIDE_INT count, count_high = 0;
4320 *spareness = 0;
4322 switch (TREE_CODE (type))
4324 tree t;
4325 case BOOLEAN_TYPE:
4326 count = 2;
4327 break;
4328 case CHAR_TYPE:
4329 count = 1 << BITS_PER_UNIT;
4330 break;
4331 default:
4332 case INTEGER_TYPE:
4333 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4334 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4335 return -1;
4336 else
4338 /* count
4339 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4340 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4341 but with overflow checking. */
4342 tree mint = TYPE_MIN_VALUE (type);
4343 tree maxt = TYPE_MAX_VALUE (type);
4344 HOST_WIDE_INT lo, hi;
4345 neg_double(TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4346 &lo, &hi);
4347 add_double(TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4348 lo, hi, &lo, &hi);
4349 add_double (lo, hi, 1, 0, &lo, &hi);
4350 if (hi != 0 || lo < 0)
4351 return -2;
4352 count = lo;
4354 break;
4355 case ENUMERAL_TYPE:
4356 count = 0;
4357 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4359 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4360 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4361 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4362 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4363 *spareness = 1;
4364 count++;
4366 if (*spareness == 1)
4368 tree prev = TREE_VALUE (TYPE_VALUES (type));
4369 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4371 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4373 *spareness = 2;
4374 break;
4376 prev = TREE_VALUE (t);
4381 return count;
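/* Illustrative examples (assumed types, not from this file):
   `enum e { A, B, C }' yields 3 with *SPARSENESS == 0;
   `enum e { A = 0, B = 4 }' yields 2 with *SPARSENESS == 1, since
   the values increase but not always by 1; and duplicates as in
   `enum e { A = 1, B = 1 }' yield *SPARSENESS == 2.  */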
4385 #define BITARRAY_TEST(ARRAY, INDEX) \
4386 ((ARRAY)[(unsigned)(INDEX) / HOST_BITS_PER_CHAR]\
4387 & (1 << ((unsigned)(INDEX) % HOST_BITS_PER_CHAR)))
4388 #define BITARRAY_SET(ARRAY, INDEX) \
4389 ((ARRAY)[(unsigned)(INDEX) / HOST_BITS_PER_CHAR]\
4390 |= 1 << ((unsigned)(INDEX) % HOST_BITS_PER_CHAR))
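/* Worked example (assuming HOST_BITS_PER_CHAR == 8):
   BITARRAY_SET (ARRAY, 10) sets bit 2 of ARRAY[1], since
   10 / 8 == 1 and 10 % 8 == 2; BITARRAY_TEST (ARRAY, 10)
   then yields a nonzero value.  */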
4392 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT)
4393 with the case values we have seen, assuming the case expression
4394 has the given TYPE.
4395 SPARSENESS is as determined by all_cases_count.
4397 The time needed is proportional to COUNT, unless
4398 SPARSENESS is 2, in which case quadratic time is needed. */
4400 void
4401 mark_seen_cases (type, cases_seen, count, sparseness)
4402 tree type;
4403 unsigned char *cases_seen;
4404 long count;
4405 int sparseness;
4407 long i;
4409 tree next_node_to_try = NULL_TREE;
4410 long next_node_offset = 0;
4412 register struct case_node *n;
4413 tree val = make_node (INTEGER_CST);
4414 TREE_TYPE (val) = type;
4415 for (n = case_stack->data.case_stmt.case_list; n;
4416 n = n->right)
4418 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4419 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4420 while ( ! tree_int_cst_lt (n->high, val))
4422 /* Calculate (into xlo) the "offset" of the integer (val).
4423 The element with lowest value has offset 0, the next smallest
4424 element has offset 1, etc. */
4426 HOST_WIDE_INT xlo, xhi;
4427 tree t;
4428 if (sparseness == 2)
4430 /* This less efficient loop is only needed to handle
4431 duplicate case values (multiple enum constants
4432 with the same value). */
4433 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4434 t = TREE_CHAIN (t), xlo++)
4436 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4437 BITARRAY_SET (cases_seen, xlo);
4440 else
4442 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4444 /* The TYPE_VALUES will be in increasing order, so
4445 start searching where we last ended. */
4446 t = next_node_to_try;
4447 xlo = next_node_offset;
4448 xhi = 0;
4449 for (;;)
4451 if (t == NULL_TREE)
4453 t = TYPE_VALUES (type);
4454 xlo = 0;
4456 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4458 next_node_to_try = TREE_CHAIN (t);
4459 next_node_offset = xlo + 1;
4460 break;
4462 xlo++;
4463 t = TREE_CHAIN (t);
4464 if (t == next_node_to_try)
4465 break;
4468 else
4470 t = TYPE_MIN_VALUE (type);
4471 if (t)
4472 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4473 &xlo, &xhi);
4474 else
4475 xlo = xhi = 0;
4476 add_double (xlo, xhi,
4477 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4478 &xlo, &xhi);
4481 if (xhi == 0 && xlo >= 0 && xlo < count)
4482 BITARRAY_SET (cases_seen, xlo);
4484 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4485 1, 0,
4486 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4491 /* Called when the index of a switch statement is an enumerated type
4492 and there is no default label.
4494 Checks that all enumeration literals are covered by the case
4495 expressions of a switch. Also, warn if there are any extra
4496 switch cases that are *not* elements of the enumerated type.
4498 If all enumeration literals were covered by the case expressions,
4499 turn one of the expressions into the default expression since it should
4500 not be possible to fall through such a switch. */
4502 void
4503 check_for_full_enumeration_handling (type)
4504 tree type;
4506 register struct case_node *n;
4507 register struct case_node **l;
4508 register tree chain;
4509 int all_values = 1;
4511 /* True iff the selector type is a numbered set mode. */
4512 int sparseness = 0;
4514 /* The number of possible selector values. */
4515 HOST_WIDE_INT size;
4517 /* For each possible selector value, a one iff it has been matched
4518 by a case value alternative. */
4519 unsigned char *cases_seen;
4521 /* The allocated size of cases_seen, in chars. */
4522 long bytes_needed;
4523 tree t;
4525 if (output_bytecode)
4527 bc_check_for_full_enumeration_handling (type);
4528 return;
4531 if (! warn_switch)
4532 return;
4534 size = all_cases_count (type, &sparseness);
4535 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
4537 if (size > 0 && size < 600000
4538 /* We deliberately use malloc here - not xmalloc. */
4539 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
4541 long i;
4542 tree v = TYPE_VALUES (type);
4543 bzero (cases_seen, bytes_needed);
4545 /* The time complexity of this code is normally O(N), where
4546 N is the number of members in the enumerated type.
4547 However, if type is an ENUMERAL_TYPE whose values do not
4548 increase monotonically, quadratic time may be needed. */
4550 mark_seen_cases (type, cases_seen, size, sparseness);
4552 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4554 if (BITARRAY_TEST(cases_seen, i) == 0)
4555 warning ("enumeration value `%s' not handled in switch",
4556 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
4559 free (cases_seen);
4562 /* Now we go the other way around; we warn if there are case
4563 expressions that don't correspond to enumerators. This can
4564 occur since C and C++ don't enforce type-checking of
4565 assignments to enumeration variables. */
4567 if (warn_switch)
4568 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4570 for (chain = TYPE_VALUES (type);
4571 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4572 chain = TREE_CHAIN (chain))
4575 if (!chain)
4577 if (TYPE_NAME (type) == 0)
4578 warning ("case value `%d' not in enumerated type",
4579 TREE_INT_CST_LOW (n->low));
4580 else
4581 warning ("case value `%d' not in enumerated type `%s'",
4582 TREE_INT_CST_LOW (n->low),
4583 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4584 == IDENTIFIER_NODE)
4585 ? TYPE_NAME (type)
4586 : DECL_NAME (TYPE_NAME (type))));
4588 if (!tree_int_cst_equal (n->low, n->high))
4590 for (chain = TYPE_VALUES (type);
4591 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4592 chain = TREE_CHAIN (chain))
4595 if (!chain)
4597 if (TYPE_NAME (type) == 0)
4598 warning ("case value `%d' not in enumerated type",
4599 TREE_INT_CST_LOW (n->high));
4600 else
4601 warning ("case value `%d' not in enumerated type `%s'",
4602 TREE_INT_CST_LOW (n->high),
4603 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4604 == IDENTIFIER_NODE)
4605 ? TYPE_NAME (type)
4606 : DECL_NAME (TYPE_NAME (type))));
4611 #if 0
4612 /* ??? This optimization is disabled because it causes valid programs to
4613 fail. ANSI C does not guarantee that an expression with enum type
4614 will have a value that is the same as one of the enumeration literals. */
4616 /* If all values were found as case labels, make one of them the default
4617 label. Thus, this switch will never fall through. We arbitrarily pick
4618 the last one to make the default since this is likely the most
4619 efficient choice. */
4621 if (all_values)
4623 for (l = &case_stack->data.case_stmt.case_list;
4624 (*l)->right != 0;
4625 l = &(*l)->right)
4628 case_stack->data.case_stmt.default_label = (*l)->code_label;
4629 *l = 0;
4631 #endif /* 0 */
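/* Illustrative example (assumed source, not from this file): given
   `enum color { RED, GREEN, BLUE }', a switch on a `color' value
   with no default and only `case RED:' and `case 42:' draws
   warnings that `GREEN' and `BLUE' are not handled in the switch,
   plus a warning that case value `42' is not in the enumerated
   type.  */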
4635 /* Check that all enumeration literals are covered by the case
4636 expressions of a switch. Also warn if there are any cases
4637 that are not elements of the enumerated type. */
4639 static void
4640 bc_check_for_full_enumeration_handling (type)
4641 tree type;
4643 struct nesting *thiscase = case_stack;
4644 struct case_node *c;
4645 tree e;
4647 /* Check for enums not handled. */
4648 for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
4650 for (c = thiscase->data.case_stmt.case_list->left;
4651 c && tree_int_cst_lt (c->high, TREE_VALUE (e));
4652 c = c->left)
4654 if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
4655 warning ("enumerated value `%s' not handled in switch",
4656 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
4659 /* Check for cases not in the enumeration. */
4660 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4662 for (e = TYPE_VALUES (type);
4663 e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
4664 e = TREE_CHAIN (e))
4666 if (! e)
4667 warning ("case value `%d' not in enumerated type `%s'",
4668 TREE_INT_CST_LOW (c->low),
4669 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
4670 ? TYPE_NAME (type)
4671 : DECL_NAME (TYPE_NAME (type))));
4675 /* Terminate a case (Pascal) or switch (C) statement
4676 in which ORIG_INDEX is the expression to be tested.
4677 Generate the code to test it and jump to the right place. */
4679 void
4680 expand_end_case (orig_index)
4681 tree orig_index;
4683 tree minval, maxval, range, orig_minval;
4684 rtx default_label = 0;
4685 register struct case_node *n;
4686 int count;
4687 rtx index;
4688 rtx table_label;
4689 int ncases;
4690 rtx *labelvec;
4691 register int i;
4692 rtx before_case;
4693 register struct nesting *thiscase = case_stack;
4694 tree index_expr, index_type;
4695 int unsignedp;
4697 if (output_bytecode)
4699 bc_expand_end_case (orig_index);
4700 return;
4703 table_label = gen_label_rtx ();
4704 index_expr = thiscase->data.case_stmt.index_expr;
4705 index_type = TREE_TYPE (index_expr);
4706 unsignedp = TREE_UNSIGNED (index_type);
4708 do_pending_stack_adjust ();
4710 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4711 if (index_type != error_mark_node)
4713 /* If switch expression was an enumerated type, check that all
4714 enumeration literals are covered by the cases.
4715 No sense trying this if there's a default case, however. */
4717 if (!thiscase->data.case_stmt.default_label
4718 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4719 && TREE_CODE (index_expr) != INTEGER_CST)
4720 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4722 /* If this is the first label, warn if any insns have been emitted. */
4723 if (thiscase->data.case_stmt.seenlabel == 0)
4725 rtx insn;
4726 for (insn = get_last_insn ();
4727 insn != case_stack->data.case_stmt.start;
4728 insn = PREV_INSN (insn))
4729 if (GET_CODE (insn) != NOTE
4730 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
4732 warning ("unreachable code at beginning of %s",
4733 case_stack->data.case_stmt.printname);
4734 break;
4738 /* If we don't have a default-label, create one here,
4739 after the body of the switch. */
4740 if (thiscase->data.case_stmt.default_label == 0)
4742 thiscase->data.case_stmt.default_label
4743 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4744 expand_label (thiscase->data.case_stmt.default_label);
4746 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4748 before_case = get_last_insn ();
4750 /* Simplify the case-list before we count it. */
4751 group_case_nodes (thiscase->data.case_stmt.case_list);
4753 /* Get upper and lower bounds of case values.
4754 Also convert all the case values to the index expr's data type. */
4756 count = 0;
4757 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4759 /* Check low and high label values are integers. */
4760 if (TREE_CODE (n->low) != INTEGER_CST)
4761 abort ();
4762 if (TREE_CODE (n->high) != INTEGER_CST)
4763 abort ();
4765 n->low = convert (index_type, n->low);
4766 n->high = convert (index_type, n->high);
4768 /* Count the elements and track the largest and smallest
4769 of them (treating them as signed even if they are not). */
4770 if (count++ == 0)
4772 minval = n->low;
4773 maxval = n->high;
4775 else
4777 if (INT_CST_LT (n->low, minval))
4778 minval = n->low;
4779 if (INT_CST_LT (maxval, n->high))
4780 maxval = n->high;
4782 /* A range counts double, since it requires two compares. */
4783 if (! tree_int_cst_equal (n->low, n->high))
4784 count++;
4787 orig_minval = minval;
4789 /* Compute span of values. */
4790 if (count != 0)
4791 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
4793 if (count == 0)
4795 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
4796 emit_queue ();
4797 emit_jump (default_label);
4800 /* If range of values is much bigger than number of values,
4801 make a sequence of conditional branches instead of a dispatch.
4802 If the switch-index is a constant, do it this way
4803 because we can optimize it. */
4805 #ifndef CASE_VALUES_THRESHOLD
4806 #ifdef HAVE_casesi
4807 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
4808 #else
4809 /* If machine does not have a case insn that compares the
4810 bounds, this means extra overhead for dispatch tables
4811 which raises the threshold for using them. */
4812 #define CASE_VALUES_THRESHOLD 5
4813 #endif /* HAVE_casesi */
4814 #endif /* CASE_VALUES_THRESHOLD */
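/* Illustrative arithmetic (hypothetical numbers): with HAVE_casesi,
   CASE_VALUES_THRESHOLD is 4, so a switch with 3 case labels always
   gets compare branches; 20 labels spread over a span of 1000 do
   too, since 1000 > 10 * 20; but 20 labels within a span of 150
   fall through to the dispatch-table code further below.  */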
4816 else if (TREE_INT_CST_HIGH (range) != 0
4817 || count < CASE_VALUES_THRESHOLD
4818 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
4819 > 10 * count)
4820 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
4821 || flag_pic
4822 #endif
4823 || TREE_CODE (index_expr) == INTEGER_CST
4824 /* These will reduce to a constant. */
4825 || (TREE_CODE (index_expr) == CALL_EXPR
4826 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
4827 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
4828 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
4829 || (TREE_CODE (index_expr) == COMPOUND_EXPR
4830 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
4832 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4834 /* If the index is a short or char for which we do not have
4835 an insn to handle comparisons directly, convert it to
4836 a full integer now, rather than letting each comparison
4837 generate the conversion. */
4839 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
4840 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
4841 == CODE_FOR_nothing))
4843 enum machine_mode wider_mode;
4844 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
4845 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4846 if (cmp_optab->handlers[(int) wider_mode].insn_code
4847 != CODE_FOR_nothing)
4849 index = convert_to_mode (wider_mode, index, unsignedp);
4850 break;
4854 emit_queue ();
4855 do_pending_stack_adjust ();
4857 index = protect_from_queue (index, 0);
4858 if (GET_CODE (index) == MEM)
4859 index = copy_to_reg (index);
4860 if (GET_CODE (index) == CONST_INT
4861 || TREE_CODE (index_expr) == INTEGER_CST)
4863 /* Make a tree node with the proper constant value
4864 if we don't already have one. */
4865 if (TREE_CODE (index_expr) != INTEGER_CST)
4867 index_expr
4868 = build_int_2 (INTVAL (index),
4869 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
4870 index_expr = convert (index_type, index_expr);
4873 /* For constant index expressions we need only
4874 issue an unconditional branch to the appropriate
4875 target code. The job of removing any unreachable
4876 code is left to the optimization phase if the
4877 "-O" option is specified. */
4878 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4879 if (! tree_int_cst_lt (index_expr, n->low)
4880 && ! tree_int_cst_lt (n->high, index_expr))
4881 break;
4883 if (n)
4884 emit_jump (label_rtx (n->code_label));
4885 else
4886 emit_jump (default_label);
4888 else
4890 /* If the index expression is not constant we generate
4891 a binary decision tree to select the appropriate
4892 target code. This is done as follows:
4894 The list of cases is rearranged into a binary tree,
4895 nearly optimal assuming equal probability for each case.
4897 The tree is transformed into RTL, eliminating
4898 redundant test conditions at the same time.
4900 If program flow could reach the end of the
4901 decision tree an unconditional jump to the
4902 default code is emitted. */
4904 use_cost_table
4905 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
4906 && estimate_case_costs (thiscase->data.case_stmt.case_list));
4907 balance_case_nodes (&thiscase->data.case_stmt.case_list,
4908 NULL_PTR);
4909 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
4910 default_label, index_type);
4911 emit_jump_if_reachable (default_label);
4914 else
4916 int win = 0;
4917 #ifdef HAVE_casesi
4918 if (HAVE_casesi)
4920 enum machine_mode index_mode = SImode;
4921 int index_bits = GET_MODE_BITSIZE (index_mode);
4922 rtx op1, op2;
4923 enum machine_mode op_mode;
4925 /* Convert the index to SImode. */
4926 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
4927 > GET_MODE_BITSIZE (index_mode))
4929 enum machine_mode omode = TYPE_MODE (index_type);
4930 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
4932 /* We must handle the endpoints in the original mode. */
4933 index_expr = build (MINUS_EXPR, index_type,
4934 index_expr, minval);
4935 minval = integer_zero_node;
4936 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4937 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
4938 emit_jump_insn (gen_bltu (default_label));
4939 /* Now we can safely truncate. */
4940 index = convert_to_mode (index_mode, index, 0);
4942 else
4944 if (TYPE_MODE (index_type) != index_mode)
4946 index_expr = convert (type_for_size (index_bits, 0),
4947 index_expr);
4948 index_type = TREE_TYPE (index_expr);
4951 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4953 emit_queue ();
4954 index = protect_from_queue (index, 0);
4955 do_pending_stack_adjust ();
4957 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
4958 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
4959 (index, op_mode))
4960 index = copy_to_mode_reg (op_mode, index);
4962 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
4964 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
4965 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
4966 (op1, op_mode))
4967 op1 = copy_to_mode_reg (op_mode, op1);
4969 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
4971 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
4972 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
4973 (op2, op_mode))
4974 op2 = copy_to_mode_reg (op_mode, op2);
4976 emit_jump_insn (gen_casesi (index, op1, op2,
4977 table_label, default_label));
4978 win = 1;
4980 #endif
4981 #ifdef HAVE_tablejump
4982 if (! win && HAVE_tablejump)
4984 index_expr = convert (thiscase->data.case_stmt.nominal_type,
4985 fold (build (MINUS_EXPR, index_type,
4986 index_expr, minval)));
4987 index_type = TREE_TYPE (index_expr);
4988 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4989 emit_queue ();
4990 index = protect_from_queue (index, 0);
4991 do_pending_stack_adjust ();
4993 do_tablejump (index, TYPE_MODE (index_type),
4994 expand_expr (range, NULL_RTX, VOIDmode, 0),
4995 table_label, default_label);
4996 win = 1;
4998 #endif
4999 if (! win)
5000 abort ();
5002 /* Get table of labels to jump to, in order of case index. */
5004 ncases = TREE_INT_CST_LOW (range) + 1;
5005 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5006 bzero ((char *) labelvec, ncases * sizeof (rtx));
5008 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5010 register HOST_WIDE_INT i
5011 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5013 while (1)
5015 labelvec[i]
5016 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
5017 if (i + TREE_INT_CST_LOW (orig_minval)
5018 == TREE_INT_CST_LOW (n->high))
5019 break;
5020 i++;
5024 /* Fill in the gaps with the default. */
5025 for (i = 0; i < ncases; i++)
5026 if (labelvec[i] == 0)
5027 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
5029 /* Output the table */
5030 emit_label (table_label);
5032 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
5033 were an expression, instead of an #ifdef/#ifndef. */
5034 if (
5035 #ifdef CASE_VECTOR_PC_RELATIVE
5036 1 ||
5037 #endif
5038 flag_pic)
5039 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
5040 gen_rtx (LABEL_REF, Pmode, table_label),
5041 gen_rtvec_v (ncases, labelvec)));
5042 else
5043 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
5044 gen_rtvec_v (ncases, labelvec)));
5046 /* If the case insn drops through the table,
5047 after the table we must jump to the default-label.
5048 Otherwise record no drop-through after the table. */
5049 #ifdef CASE_DROPS_THROUGH
5050 emit_jump (default_label);
5051 #else
5052 emit_barrier ();
5053 #endif
5056 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5057 reorder_insns (before_case, get_last_insn (),
5058 thiscase->data.case_stmt.start);
5061 if (thiscase->exit_label)
5062 emit_label (thiscase->exit_label);
5064 POPSTACK (case_stack);
5066 free_temp_slots ();
5070 /* Terminate a case statement. EXPR is the original index
5071 expression. */
5073 static void
5074 bc_expand_end_case (expr)
5075 tree expr;
5077 struct nesting *thiscase = case_stack;
5078 enum bytecode_opcode opcode;
5079 struct bc_label *jump_label;
5080 struct case_node *c;
5082 bc_emit_bytecode (jump);
5083 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
5085 #ifdef DEBUG_PRINT_CODE
5086 fputc ('\n', stderr);
5087 #endif
5089 /* Now that the size of the jump table is known, emit the actual
5090 indexed jump instruction. */
5091 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
5093 opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
5094 ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
5095 : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;
5097 bc_emit_bytecode (opcode);
5099 /* Now emit the case instruction's literal arguments, in order.
5100 In addition to the value on the stack, it uses:
5101 1. The address of the jump table.
5102 2. The size of the jump table.
5103 3. The default label. */
5105 jump_label = bc_get_bytecode_label ();
5106 bc_emit_bytecode_labelref (jump_label);
5107 bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
5108 sizeof thiscase->data.case_stmt.num_ranges);
5110 if (thiscase->data.case_stmt.default_label)
5111 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)));
5112 else
5113 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
5115 /* Output the jump table. */
5117 bc_align_bytecode (3 /* PTR_ALIGN */);
5118 bc_emit_bytecode_labeldef (jump_label);
5120 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
5121 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5123 opcode = TREE_INT_CST_LOW (c->low);
5124 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5126 opcode = TREE_INT_CST_LOW (c->high);
5127 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5129 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
5131 else
5132 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
5133 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5135 bc_emit_bytecode_DI_const (c->low);
5136 bc_emit_bytecode_DI_const (c->high);
5138 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
5140 else
5141 /* Bad mode */
5142 abort ();
5145 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));
5147 /* Possibly issue enumeration warnings. */
5149 if (!thiscase->data.case_stmt.default_label
5150 && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
5151 && TREE_CODE (expr) != INTEGER_CST
5152 && warn_switch)
5153 check_for_full_enumeration_handling (TREE_TYPE (expr));
5156 #ifdef DEBUG_PRINT_CODE
5157 fputc ('\n', stderr);
5158 #endif
5160 POPSTACK (case_stack);
5164 /* Return unique bytecode ID. */
5166 int
5167 bc_new_uid ()
5169 static int bc_uid = 0;
5171 return (++bc_uid);
5174 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5176 static void
5177 do_jump_if_equal (op1, op2, label, unsignedp)
5178 rtx op1, op2, label;
5179 int unsignedp;
5181 if (GET_CODE (op1) == CONST_INT
5182 && GET_CODE (op2) == CONST_INT)
5184 if (INTVAL (op1) == INTVAL (op2))
5185 emit_jump (label);
5187 else
5189 enum machine_mode mode = GET_MODE (op1);
5190 if (mode == VOIDmode)
5191 mode = GET_MODE (op2);
5192 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
5193 emit_jump_insn (gen_beq (label));
5197 /* Not all case values are encountered equally. This function
5198 uses a heuristic to weight case labels, in cases where that
5199 looks like a reasonable thing to do.
5201 Right now, all we try to guess is text, and we establish the
5202 following weights:
5204 chars above space: 16
5205 digits: 16
5206 default: 12
5207 space, punct: 8
5208 tab: 4
5209 newline: 2
5210 other "\" chars: 1
5211 remaining chars: 0
5213 If we find any cases in the switch that are not either -1 or in the range
5214 of valid ASCII characters, or are control characters other than those
5215 commonly used with "\", don't treat this switch as scanning text.
5217 Return 1 if these nodes are suitable for cost estimation, otherwise
5218 return 0. */
5220 static int
5221 estimate_case_costs (node)
5222 case_node_ptr node;
5224 tree min_ascii = build_int_2 (-1, -1);
5225 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5226 case_node_ptr n;
5227 int i;
5229 /* If we haven't already made the cost table, make it now. Note that the
5230 lower bound of the table is -1, not zero. */
5232 if (cost_table == NULL)
5234 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5235 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5237 for (i = 0; i < 128; i++)
5239 if (isalnum (i))
5240 cost_table[i] = 16;
5241 else if (ispunct (i))
5242 cost_table[i] = 8;
5243 else if (iscntrl (i))
5244 cost_table[i] = -1;
5247 cost_table[' '] = 8;
5248 cost_table['\t'] = 4;
5249 cost_table['\0'] = 4;
5250 cost_table['\n'] = 2;
5251 cost_table['\f'] = 1;
5252 cost_table['\v'] = 1;
5253 cost_table['\b'] = 1;
5256 /* See if all the case expressions look like text. It is text if the
5257 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5258 as signed arithmetic since we don't want to ever access cost_table with a
5259 value less than -1. Also check that none of the constants in a range
5260 are strange control characters. */
5262 for (n = node; n; n = n->right)
5264 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5265 return 0;
5267 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5268 if (cost_table[i] < 0)
5269 return 0;
5272 /* All interesting values are within the range of interesting
5273 ASCII characters. */
5274 return 1;
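/* Illustrative effect (assumed input): in a text-scanning switch
   containing `case 'a':', `case '0':' and `case '\n':', the table
   weights 'a' and '0' at 16 but '\n' at only 2, so the cost
   bisection in balance_case_nodes tends to reach the likely
   alphanumeric cases with fewer comparisons.  */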
5277 /* Scan an ordered list of case nodes
5278 combining those with consecutive values or ranges.
5280 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5282 static void
5283 group_case_nodes (head)
5284 case_node_ptr head;
5286 case_node_ptr node = head;
5288 while (node)
5290 rtx lb = next_real_insn (label_rtx (node->code_label));
5291 case_node_ptr np = node;
5293 /* Try to group the successors of NODE with NODE. */
5294 while (((np = np->right) != 0)
5295 /* Do they jump to the same place? */
5296 && next_real_insn (label_rtx (np->code_label)) == lb
5297 /* Are their ranges consecutive? */
5298 && tree_int_cst_equal (np->low,
5299 fold (build (PLUS_EXPR,
5300 TREE_TYPE (node->high),
5301 node->high,
5302 integer_one_node)))
5303 /* An overflow is not consecutive. */
5304 && tree_int_cst_lt (node->high,
5305 fold (build (PLUS_EXPR,
5306 TREE_TYPE (node->high),
5307 node->high,
5308 integer_one_node))))
5310 node->high = np->high;
5312 /* NP is the first node after NODE which can't be grouped with it.
5313 Delete the nodes in between, and move on to that node. */
5314 node->right = np;
5315 node = np;
5319 /* Take an ordered list of case nodes
5320 and transform them into a near optimal binary tree,
5321 on the assumption that any target code selection value is as
5322 likely as any other.
5324 The transformation is performed by splitting the ordered
5325 list into two equal sections plus a pivot. The parts are
5326 then attached to the pivot as left and right branches. Each
5327 branch is then transformed recursively. */
5329 static void
5330 balance_case_nodes (head, parent)
5331 case_node_ptr *head;
5332 case_node_ptr parent;
5334 register case_node_ptr np;
5336 np = *head;
5337 if (np)
5339 int cost = 0;
5340 int i = 0;
5341 int ranges = 0;
5342 register case_node_ptr *npp;
5343 case_node_ptr left;
5345 /* Count the number of entries on branch. Also count the ranges. */
5347 while (np)
5349 if (!tree_int_cst_equal (np->low, np->high))
5351 ranges++;
5352 if (use_cost_table)
5353 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5356 if (use_cost_table)
5357 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5359 i++;
5360 np = np->right;
5363 if (i > 2)
5365 /* Split this list if it is long enough for that to help. */
5366 npp = head;
5367 left = *npp;
5368 if (use_cost_table)
5370 /* Find the place in the list that bisects the list's total cost;
5371 here `i' gets half the total cost. */
5372 int n_moved = 0;
5373 i = (cost + 1) / 2;
5374 while (1)
5376 /* Skip nodes while their cost does not reach that amount. */
5377 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5378 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5379 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5380 if (i <= 0)
5381 break;
5382 npp = &(*npp)->right;
5383 n_moved += 1;
5385 if (n_moved == 0)
5387 /* Leave this branch lopsided, but optimize left-hand
5388 side and fill in `parent' fields for right-hand side. */
5389 np = *head;
5390 np->parent = parent;
5391 balance_case_nodes (&np->left, np);
5392 for (; np->right; np = np->right)
5393 np->right->parent = np;
5394 return;
5397 /* If there are just three nodes, split at the middle one. */
5398 else if (i == 3)
5399 npp = &(*npp)->right;
5400 else
5402 /* Find the place in the list that bisects the list's total cost,
5403 where ranges count as 2;
5404 here `i' gets half the total cost. */
5405 i = (i + ranges + 1) / 2;
5406 while (1)
5408 /* Skip nodes while their cost does not reach that amount. */
5409 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5410 i--;
5411 i--;
5412 if (i <= 0)
5413 break;
5414 npp = &(*npp)->right;
5417 *head = np = *npp;
5418 *npp = 0;
5419 np->parent = parent;
5420 np->left = left;
5422 /* Optimize each of the two split parts. */
5423 balance_case_nodes (&np->left, np);
5424 balance_case_nodes (&np->right, np);
5426 else
5428 /* Else leave this branch as one level,
5429 but fill in `parent' fields. */
5430 np = *head;
5431 np->parent = parent;
5432 for (; np->right; np = np->right)
5433 np->right->parent = np;
5438 /* Search the parent sections of the case node tree
5439 to see if a test for the lower bound of NODE would be redundant.
5440 INDEX_TYPE is the type of the index expression.
5442 The instructions to generate the case decision tree are
5443 output in the same order as nodes are processed, so it is
5444 known that if a parent node checks the range of the current
5445 node minus one, the current node is bounded at its lower
5446 span. Thus the test would be redundant. */
static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
                               node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}
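
/* Worked example (hypothetical case values): suppose NODE handles
   `case 5' and one of its parents handles `case 1 ... 4'.  Control can
   only reach NODE on the side of that parent where the index exceeds 4,
   so low_minus_one == 4 matches that parent's `high' field and the
   lower-bound test on NODE would be redundant; we return 1.  */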
/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed, so it is
   known that if a parent node tests the value one above the current
   node's upper bound, the current node is already bounded at its upper
   span.  Thus the test would be redundant.  */
static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
                               node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}
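
/* The mirror-image example (hypothetical case values): if NODE handles
   `case 5' and a parent handles `case 6 ... 9', control reaches NODE
   only when the index is already known to be below 6, so
   high_plus_one == 6 matches that parent's `low' field and the
   upper-bound test on NODE would be redundant.  */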
/* Search the parent sections of the case node tree to see if both
   tests for the upper and lower bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
          && node_has_high_bound (node, index_type));
}
/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}
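
/* This test works because emit_jump places a BARRIER after each
   unconditional jump it emits, so a BARRIER as the last insn means
   control cannot flow to the current position and any jump emitted
   here would be dead code.  */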
/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out-of-bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */
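
/* As an illustration (a hypothetical three-case switch; the labels are
   invented for the sketch): for a balanced tree over `case 1',
   `case 2' and `case 3' rooted at 2, with neither subtree bounded,
   the code emitted for the root is roughly

        if (index == 2) goto L2;
        if (index > 2) goto Ltest;
        if (index == 1) goto L1;
        goto Ldefault;
     Ltest:
        if (index == 3) goto L3;

   where the fall-through after the last test is sent to the default
   label by the code that emitted the whole tree.  */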
static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_function ();
  rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
  rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
  rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
  rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
  enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
         this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                        label_rtx (node->code_label), unsignedp);
      if (node->right != 0 && node->left != 0)
        {
          /* This node has children on both sides.
             Dispatch to one side or the other
             by comparing the index value with this node's value.
             If one subtree is bounded, check that one first,
             so we can avoid real branches in the tree.  */

          if (node_is_bounded (node->right, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);

              emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
              emit_case_nodes (index, node->left, default_label, index_type);
            }

          else if (node_is_bounded (node->left, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
              emit_case_nodes (index, node->right, default_label, index_type);
            }

          else
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              tree test_label
                = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

              /* See if the value is on the right.  */
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));

              /* Value must be on the left.
                 Handle the left-hand subtree.  */
              emit_case_nodes (index, node->left, default_label, index_type);
              /* If left-hand subtree does nothing,
                 go to default.  */
              emit_jump_if_reachable (default_label);

              /* Code branches here for the right-hand subtree.  */
              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }
      else if (node->right != 0 && node->left == 0)
        {
          /* Here we have a right child but no left, so we issue a conditional
             branch to default and process the right child.

             Omit the conditional branch to default if the right child is a
             lone single value; it costs too much space to save so little
             time.  */

          if (node->right->right || node->right->left
              || !tree_int_cst_equal (node->right->low, node->right->high))
            {
              if (!node_has_low_bound (node, index_type))
                {
                  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                 LT, NULL_RTX, mode, unsignedp, 0);
                  emit_jump_insn ((*gen_blt_pat) (default_label));
                }

              emit_case_nodes (index, node->right, default_label, index_type);
            }
          else
            /* We cannot process node->right normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->right explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->right->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->right->code_label), unsignedp);
        }
      else if (node->right == 0 && node->left != 0)
        {
          /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
         of the condition here, but they didn't work
         and I don't understand what the idea was.  -- rms.  */
          /* If our "most probable entry" is less probable
             than the default label, emit a jump to
             the default label using condition codes
             already lying around.  With no right branch,
             a branch-greater-than will get us to the default
             label correctly.  */
          if (use_cost_table
              && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
            ;
#endif /* 0 */
          if (node->left->left || node->left->right
              || !tree_int_cst_equal (node->left->low, node->left->high))
            {
              if (!node_has_high_bound (node, index_type))
                {
                  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                 GT, NULL_RTX, mode, unsignedp, 0);
                  emit_jump_insn ((*gen_bgt_pat) (default_label));
                }

              emit_case_nodes (index, node->left, default_label, index_type);
            }
          else
            /* We cannot process node->left normally
               since we haven't ruled out the numbers greater than
               this node's value.  So handle node->left explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->left->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->left->code_label), unsignedp);
        }
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
         value, except that we do not start by testing whether this node
         is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
        {
          /* Node has subtrees on both sides.
             If the right-hand subtree is bounded,
             test for it first, since we can go straight there.
             Otherwise, we need to make a branch in the control structure,
             then handle the two subtrees.  */
          tree test_label = 0;

          emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                             VOIDmode, 0),
                         GT, NULL_RTX, mode, unsignedp, 0);

          if (node_is_bounded (node->right, index_type))
            /* Right hand node is fully bounded so we can eliminate any
               testing and branch directly to the target code.  */
            emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
          else
            {
              /* Right hand node requires testing.
                 Branch to a label where we will handle it later.  */

              test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
              emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                         GE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

          /* Handle the left-hand subtree.  */
          emit_case_nodes (index, node->left, default_label, index_type);

          /* If right node had to be handled later, do that now.  */

          if (test_label)
            {
              /* If the left-hand subtree fell through,
                 don't let it fall into the right-hand subtree.  */
              emit_jump_if_reachable (default_label);

              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }
      else if (node->right != 0 && node->left == 0)
        {
          /* Deal with values to the left of this node,
             if they are possible.  */
          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (default_label));
            }

          /* Value belongs to this node or to the right-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                             VOIDmode, 0),
                         LE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));

          emit_case_nodes (index, node->right, default_label, index_type);
        }
      else if (node->right == 0 && node->left != 0)
        {
          /* Deal with values to the right of this node,
             if they are possible.  */
          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (default_label));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                         GE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

          emit_case_nodes (index, node->left, default_label, index_type);
        }
      else
        {
          /* Node has no children, so we check the low and high bounds to
             remove redundant tests.  At most one of the two tests can be
             proved redundant here, since a node with both bounds proved
             is fully bounded--a case handled already above.  */

          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (default_label));
            }

          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (default_label));
            }

          emit_jump (label_rtx (node->code_label));
        }
    }
}
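
/* Another illustrative sketch (hypothetical case values and labels):
   a childless range node for `case 10 ... 20' whose parents proved
   neither bound emits roughly

        if (index > 20) goto Ldefault;
        if (index < 10) goto Ldefault;
        goto L10_20;

   and if some parent had already handled `case 21 ... 30', the first
   comparison would be pruned by node_has_high_bound.  At most one of
   the two comparisons is ever pruned here, since a node with both
   bounds proved is caught by the node_is_bounded test at the top.  */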
/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.  */

static tree *block_vector;

void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());
}
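
/* Sketch of the assumed usage (the actual call site is outside this
   file, in the compilation driver; the exact argument names here are
   assumptions, not quoted from it).  The pairing is presumably

        if (flag_unroll_loops)
          find_loop_tree_blocks ();
        loop_optimize (insns, loop_dump_file);
        if (flag_unroll_loops)
          unroll_block_trees ();

   so block_vector is only meaningful between the two calls, while the
   unroller duplicates insns and their block notes.  */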