/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-7, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */

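/* Illustration (not part of the original sources): for `if (c) stmt;' a
   front end might make roughly this call sequence, where COND_TREE stands
   for whatever tree the parser built for `c':

     expand_start_cond (cond_tree, 0);   -- test C; jump past clause if false
     ...expand the then-clause statements...
     expand_end_cond ();                 -- emit the join label

   The exact trees involved depend on the language front end.  */
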
#include "config.h"

#include <stdio.h>
#include <ctype.h>

#ifdef HAVE_STDLIB_H
#include <stdlib.h>
#endif

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

/* Stack allocation level in which temporaries for TARGET_EXPRs live.  */
extern int target_temp_slot_level;

extern int temp_slot_level;

/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

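/* Illustration (not part of the original sources): for a dense switch like

     switch (x) { case 1: ...  case 2: ...  case 3: ...  }

   the case values fill the range 1..3, so a branch table is emitted.
   For sparse values such as 1, 1000 and 1000000 the case nodes are
   instead balanced into the binary tree of compare-and-jump insns
   described above.  The exact density threshold is decided elsewhere
   in this file.  */
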
struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

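/* Illustration (not part of the original sources): while the body of

     while (c) { ... break; ... }

   is being expanded, the loop's `struct nesting' sits on both `loop_stack'
   and `nesting_stack'.  The C `break' is expanded as a jump to a label
   recorded in that object (the loop's end label), and ending the loop
   pops the object from every stack it is chained on.  */
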
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block; if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	  /* When in a conditional context, this is the specific
	     cleanup list associated with last_unconditional_cleanup,
	     where we place the conditionalized cleanups.  */
	  tree *cleanup_ptr;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)                                 \
do { struct nesting *target = STACK;                    \
     struct nesting *this;                              \
     do { this = nesting_stack;                         \
          if (loop_stack == this)                       \
            loop_stack = loop_stack->next;              \
          if (cond_stack == this)                       \
            cond_stack = cond_stack->next;              \
          if (block_stack == this)                      \
            block_stack = block_stack->next;            \
          if (stack_block_stack == this)                \
            stack_block_stack = stack_block_stack->next; \
          if (case_stack == this)                       \
            case_stack = case_stack->next;              \
          nesting_depth = nesting_stack->depth - 1;     \
          nesting_stack = this->all;                    \
          obstack_free (&stmt_obstack, this); }         \
     while (this != target); } while (0)

/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

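/* Illustration (not part of the original sources): in GNU C,

     {
       int v[n];        -- this contour has a stack level
       ...
       goto out;        -- `out' not yet defined: a fixup is recorded
       ...
     }
    out: ...            -- when the contour is exited and the label is
                           later defined, the fixup inserts the proper
                           stack-restore code before the jump

   Standard C blocks with no variable-size data and no cleanups never
   need fixups.  */
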
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};

/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

static void expand_goto_internal	PROTO((tree, rtx, rtx));
static int expand_fixup			PROTO((tree, rtx, rtx));
static void fixup_gotos			PROTO((struct nesting *, rtx, tree,
					       rtx, int));
static void expand_null_return_1	PROTO((rtx, int));
static void expand_value_return		PROTO((rtx));
static int tail_recursion_args		PROTO((tree, tree));
static void expand_cleanups		PROTO((tree, tree, int, int));
static void do_jump_if_equal		PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs		PROTO((case_node_ptr));
static void group_case_nodes		PROTO((case_node_ptr));
static void balance_case_nodes		PROTO((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PROTO((case_node_ptr, tree));
static int node_has_high_bound		PROTO((case_node_ptr, tree));
static int node_is_bounded		PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable	PROTO((rtx));
static void emit_case_nodes		PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node		PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list PROTO((case_node *, case_node *));

void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
  init_eh ();
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;

  init_eh_for_function ();
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
  save_eh_status (p);
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
  restore_eh_status (p);
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}

/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (flag_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, 1,
		       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}

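/* Illustration (not part of the original sources): this entry point
   implements the GNU C computed goto, e.g.

     void *targets[] = { &&l1, &&l2 };
     goto *targets[i];

   where EXP is the tree for the address expression `targets[i]'.  */
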
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}

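/* Illustration (not part of the original sources): nonlocal labels arise
   with GNU C nested functions, e.g.

     void outer ()
     {
       __label__ failure;
       void inner () { ... goto failure; ... }   -- nonlocal goto
       ...
      failure: ...
     }

   declare_nonlocal_label is called for `failure' in OUTER; the goto in
   INNER then takes the nonlocal path in expand_goto below.  */
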
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with hard_frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx, label_ref);
	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}

/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	start_sequence ();
	pushlevel (0);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
	end_sequence ();
	emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}

/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (flag_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
				TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}

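/* Illustration (not part of the original sources): a basic asm such as

     asm ("nop");

   reaches expand_asm above with BODY being the STRING_CST for "nop" and
   becomes a single ASM_INPUT insn.  */
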
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;

  /* An ASM with no outputs needs to be treated as volatile.  */
  if (noutputs == 0)
    vol = 1;

  if (flag_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      int j;
      int found_equal = 0;
      int found_plus = 0;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
	 if it allows any register.  Be liberal on the latter test, since
	 the worst that happens if we get it wrong is we issue an error
	 message.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
	switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
	  {
	  case '+':
	    /* Make sure we can specify the matching operand.  */
	    if (i > 9)
	      {
		error ("output operand constraint %d contains `+'", i);
		return;
	      }

	    /* Replace '+' with '='.  */
	    TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] = '=';
	    found_plus = 1;
	    break;

	  case '=':
	    found_equal = 1;
	    break;

	  case '?':  case '!':  case '*':  case '%':  case '&':
	  case 'V':  case 'm':  case 'o':  case '<':  case '>':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    error ("matching constraint not valid in output operand");
	    break;

	  case 'p':  case 'g':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      if (! found_equal && ! found_plus)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      if (TREE_CODE (val) == INDIRECT_REF
	  || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || found_plus)
	{
	  if (! allows_reg)
	    mark_addressable (TREE_VALUE (tail));

	  output_rtx[i]
	    = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
			   EXPAND_MEMORY_USE_WO);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);
	}
      else
	{
	  output_rtx[i] = assign_temp (type, 0, 0, 0);
	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
	}

      if (found_plus)
	{
	  inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
	  inout_opnum[ninout++] = i;
	}
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS (VOIDmode,
			       TREE_STRING_POINTER (string), "", 0, argvec,
			       constraints, filename, line);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)) );
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
	switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
	  {
	  case '+':  case '=':
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;

	  case '?':  case '!':  case '*':  case '%':  case '&':
	  case 'V':  case 'm':  case 'o':  case '<':  case '>':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	    /* Whether or not a numeric constraint allows a register is
	       decided by the matching constraint, and so there is no need
	       to do anything special with them.  We must handle them in
	       the default case, so that we don't unnecessarily force
	       operands to memory.  */
	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]
		>= '0' + noutputs)
	      {
		error
		  ("matching constraint references invalid operand number");
		return;
	      }

	    /* ... fall through ...  */

	  case 'p':  case 'g':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      if (! allows_reg)
	mark_addressable (TREE_VALUE (tail));

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      if (CONSTANT_P (XVECEXP (body, 3, i))
	  && ! general_operand (XVECEXP (body, 3, i),
				TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)))))
	{
	  if (allows_reg)
	    XVECEXP (body, 3, i)
	      = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			   XVECEXP (body, 3, i));
	  else
	    XVECEXP (body, 3, i)
	      = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
				 XVECEXP (body, 3, i));
	}

      if (! allows_reg
	  && (GET_CODE (XVECEXP (body, 3, i)) == REG
	      || GET_CODE (XVECEXP (body, 3, i)) == SUBREG
	      || GET_CODE (XVECEXP (body, 3, i)) == CONCAT))
	{
	  tree type = TREE_TYPE (TREE_VALUE (tail));
	  rtx memloc = assign_temp (type, 1, 1, 1);

	  emit_move_insn (memloc, XVECEXP (body, 3, i));
	  XVECEXP (body, 3, i) = memloc;
	}

      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			     TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs - ninout; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      static char match[9+1][2]
	= {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
      int j = inout_opnum[i];

      XVECEXP (body, 3, ninputs - ninout + i)      /* argvec */
	= output_rtx[j];
      XVECEXP (body, 4, ninputs - ninout + i)      /* constraints */
	= gen_rtx_ASM_INPUT (inout_mode[j], match[j]);
    }

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx_SET (VOIDmode,
			   output_rtx[i],
			   gen_rtx_ASM_OPERANDS (VOIDmode,
						 TREE_STRING_POINTER (string),
						 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
						 i, argvec, constraints,
						 filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx_CLOBBER (VOIDmode,
				       gen_rtx_MEM (BLKmode,
						    gen_rtx_SCRATCH (VOIDmode)));
		  continue;
		}

	      /* Ignore unknown register, error already signaled.  */
	      continue;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}

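/* Illustration (not part of the original sources): an extended asm like

     asm ("addl %2,%0" : "=r" (out) : "0" (a), "g" (b));

   arrives with STRING = "addl %2,%0", one (constraint, lvalue) pair in
   OUTPUTS and two in INPUTS.  With one output and no clobbers it is
   emitted as a single (set OUT (asm_operands ...)); with clobbers or
   several outputs, the sets and clobbers go inside a PARALLEL.  */
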
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }

  /* If EXP is of function type and we are expanding statements for
     value, convert it to pointer-to-function.  */
  if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
    exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);

  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp,
				   (expr_stmts_for_value
				    ? NULL_RTX : const0_rtx),
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
	;
      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      NULL_RTX, VOIDmode, 0),
			 BLKmode, 0,
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}

/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case SAVE_EXPR:
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
	return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  There can be an arbitrary number of
	 casts before the modify, so we must loop until we find the first
	 non-cast expression and then test to see if that is a modify.  */
      {
	tree tem = TREE_OPERAND (exp, 0);

	while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
	  tem = TREE_OPERAND (tem, 0);

	if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
	    || TREE_CODE (tem) == CALL_EXPR)
	  return 0;
      }
      goto warn;

    case INDIRECT_REF:
      /* Don't warn about automatic dereferencing of references, since
	 the user cannot control it.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
	return warn_if_unused_value (TREE_OPERAND (exp, 0));
      /* ... fall through ...  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
	  && TREE_THIS_VOLATILE (exp))
	return 0;
    warn:
      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
      return 1;
    }
}

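/* Illustration (not part of the original sources): the statement `x + 1;'
   falls into the default case above and warns "value computed is not
   used", while `(void) (x + 1);' is silenced by the cast-to-void case.  */
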
/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}

1690 /* Begin a statement which will return a value.
1691 Return the RTL_EXPR for this statement expr.
1692 The caller must save that value and pass it to expand_end_stmt_expr. */
1694 tree
1695 expand_start_stmt_expr ()
1697 int momentary;
1698 tree t;
1700 /* Make the RTL_EXPR node temporary, not momentary,
1701 so that rtl_expr_chain doesn't become garbage. */
1702 momentary = suspend_momentary ();
1703 t = make_node (RTL_EXPR);
1704 resume_momentary (momentary);
1705 do_pending_stack_adjust ();
1706 start_sequence_for_rtl_expr (t);
1707 NO_DEFER_POP;
1708 expr_stmts_for_value++;
1709 return t;
1712 /* Restore the previous state at the end of a statement that returns a value.
1713 Returns a tree node representing the statement's value and the
1714 insns to compute the value.
1716 The nodes of that expression have been freed by now, so we cannot use them.
1717 But we don't want to do that anyway; the expression has already been
1718 evaluated and now we just want to use the value. So generate a RTL_EXPR
1719 with the proper type and RTL value.
1721 If the last substatement was not an expression,
1722 return something with type `void'. */
1724 tree
1725 expand_end_stmt_expr (t)
1726 tree t;
1728 OK_DEFER_POP;
1730 if (last_expr_type == 0)
1732 last_expr_type = void_type_node;
1733 last_expr_value = const0_rtx;
1735 else if (last_expr_value == 0)
1736 /* There are some cases where this can happen, such as when the
1737 statement is void type. */
1738 last_expr_value = const0_rtx;
1739 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1740 /* Remove any possible QUEUED. */
1741 last_expr_value = protect_from_queue (last_expr_value, 0);
1743 emit_queue ();
1745 TREE_TYPE (t) = last_expr_type;
1746 RTL_EXPR_RTL (t) = last_expr_value;
1747 RTL_EXPR_SEQUENCE (t) = get_insns ();
1749 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1751 end_sequence ();
1753 /* Don't consider deleting this expr or containing exprs at tree level. */
1754 TREE_SIDE_EFFECTS (t) = 1;
1755 /* Propagate volatility of the actual RTL expr. */
1756 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1758 last_expr_type = 0;
1759 expr_stmts_for_value--;
1761 return t;
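/* Illustrative sketch, not part of the original source: for a GNU C
   statement expression such as

       z = ({ int t = f (); t + 1; });

   a front end would bracket the expansion of the braced group roughly as

       tree rtl_expr = expand_start_stmt_expr ();
       ... expand the statements between the braces ...
       rtl_expr = expand_end_stmt_expr (rtl_expr);

   and then use RTL_EXPR_RTL (rtl_expr), here the value of `t + 1', as
   the value of the whole construct.  */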
1764 /* Generate RTL for the start of an if-then. COND is the expression
1765 whose truth should be tested.
1767 If EXITFLAG is nonzero, this conditional is visible to
1768 `exit_something'. */
1770 void
1771 expand_start_cond (cond, exitflag)
1772 tree cond;
1773 int exitflag;
1775 struct nesting *thiscond = ALLOC_NESTING ();
1777 /* Make an entry on cond_stack for the cond we are entering. */
1779 thiscond->next = cond_stack;
1780 thiscond->all = nesting_stack;
1781 thiscond->depth = ++nesting_depth;
1782 thiscond->data.cond.next_label = gen_label_rtx ();
1783 /* Before we encounter an `else', we don't need a separate exit label
1784 unless there are supposed to be exit statements
1785 to exit this conditional. */
1786 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1787 thiscond->data.cond.endif_label = thiscond->exit_label;
1788 cond_stack = thiscond;
1789 nesting_stack = thiscond;
1791 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1794 /* Generate RTL between then-clause and the elseif-clause
1795 of an if-then-elseif-.... */
1797 void
1798 expand_start_elseif (cond)
1799 tree cond;
1801 if (cond_stack->data.cond.endif_label == 0)
1802 cond_stack->data.cond.endif_label = gen_label_rtx ();
1803 emit_jump (cond_stack->data.cond.endif_label);
1804 emit_label (cond_stack->data.cond.next_label);
1805 cond_stack->data.cond.next_label = gen_label_rtx ();
1806 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1809 /* Generate RTL between the then-clause and the else-clause
1810 of an if-then-else. */
1812 void
1813 expand_start_else ()
1815 if (cond_stack->data.cond.endif_label == 0)
1816 cond_stack->data.cond.endif_label = gen_label_rtx ();
1818 emit_jump (cond_stack->data.cond.endif_label);
1819 emit_label (cond_stack->data.cond.next_label);
1820 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1823 /* After calling expand_start_else, turn this "else" into an "else if"
1824 by providing another condition. */
1826 void
1827 expand_elseif (cond)
1828 tree cond;
1830 cond_stack->data.cond.next_label = gen_label_rtx ();
1831 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1834 /* Generate RTL for the end of an if-then.
1835 Pop the record for it off of cond_stack. */
1837 void
1838 expand_end_cond ()
1840 struct nesting *thiscond = cond_stack;
1842 do_pending_stack_adjust ();
1843 if (thiscond->data.cond.next_label)
1844 emit_label (thiscond->data.cond.next_label);
1845 if (thiscond->data.cond.endif_label)
1846 emit_label (thiscond->data.cond.endif_label);
1848 POPSTACK (cond_stack);
1849 last_expr_type = 0;
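/* Illustrative sketch, not part of the original source: a front end
   expanding

       if (a) s1; else if (b) s2; else s3;

   would make the calls

       expand_start_cond (a, 0);
       ... expand s1 ...
       expand_start_elseif (b);
       ... expand s2 ...
       expand_start_else ();
       ... expand s3 ...
       expand_end_cond ();

   which is just the calling sequence the functions above are written
   to support.  */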
1854 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1855 loop should be exited by `exit_something'. This is a loop for which
1856 `expand_continue' will jump to the top of the loop.
1858 Make an entry on loop_stack to record the labels associated with
1859 this loop. */
1861 struct nesting *
1862 expand_start_loop (exit_flag)
1863 int exit_flag;
1865 register struct nesting *thisloop = ALLOC_NESTING ();
1867 /* Make an entry on loop_stack for the loop we are entering. */
1869 thisloop->next = loop_stack;
1870 thisloop->all = nesting_stack;
1871 thisloop->depth = ++nesting_depth;
1872 thisloop->data.loop.start_label = gen_label_rtx ();
1873 thisloop->data.loop.end_label = gen_label_rtx ();
1874 thisloop->data.loop.alt_end_label = 0;
1875 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
1876 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
1877 loop_stack = thisloop;
1878 nesting_stack = thisloop;
1880 do_pending_stack_adjust ();
1881 emit_queue ();
1882 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
1883 emit_label (thisloop->data.loop.start_label);
1885 return thisloop;
1888 /* Like expand_start_loop but for a loop where the continuation point
1889 (for expand_continue_loop) will be specified explicitly. */
1891 struct nesting *
1892 expand_start_loop_continue_elsewhere (exit_flag)
1893 int exit_flag;
1895 struct nesting *thisloop = expand_start_loop (exit_flag);
1896 loop_stack->data.loop.continue_label = gen_label_rtx ();
1897 return thisloop;
1900 /* Specify the continuation point for a loop started with
1901 expand_start_loop_continue_elsewhere.
1902 Use this at the point in the code to which a continue statement
1903 should jump. */
1905 void
1906 expand_loop_continue_here ()
1908 do_pending_stack_adjust ();
1909 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
1910 emit_label (loop_stack->data.loop.continue_label);
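/* Illustrative sketch, not part of the original source: a C `for'
   statement, whose continue point is the increment rather than the loop
   top, would be expanded along the lines of

       expand_start_loop_continue_elsewhere (1);
       ... expand the body ...
       expand_loop_continue_here ();
       ... expand the increment expression ...
       expand_end_loop ();

   whereas a `while' statement can use plain expand_start_loop, since
   `continue' should jump to its top anyway.  */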
1913 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
1914 Pop the block off of loop_stack. */
1916 void
1917 expand_end_loop ()
1919 register rtx insn;
1920 register rtx start_label;
1921 rtx last_test_insn = 0;
1922 int num_insns = 0;
1924 insn = get_last_insn ();
1925 start_label = loop_stack->data.loop.start_label;
1927 /* Mark the continue-point at the top of the loop if none elsewhere. */
1928 if (start_label == loop_stack->data.loop.continue_label)
1929 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
1931 do_pending_stack_adjust ();
1933 /* If optimizing, perhaps reorder the loop. If the loop
1934 starts with a conditional exit, roll that to the end
1935 where it will optimize together with the jump back.
1937 We look for the last conditional branch to the exit that we encounter
1938 before hitting 30 insns or a CALL_INSN. If we see an unconditional
1939 branch to the exit first, use it.
1941 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
1942 because moving them is not valid. */
1944 if (optimize
1945 &&
1946 ! (GET_CODE (insn) == JUMP_INSN
1947 && GET_CODE (PATTERN (insn)) == SET
1948 && SET_DEST (PATTERN (insn)) == pc_rtx
1949 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
1951 /* Scan insns from the top of the loop looking for a qualified
1952 conditional exit. */
1953 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
1954 insn = NEXT_INSN (insn))
1956 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
1957 break;
1959 if (GET_CODE (insn) == NOTE
1960 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1961 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1962 break;
1964 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
1965 num_insns++;
1967 if (last_test_insn && num_insns > 30)
1968 break;
1970 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
1971 && SET_DEST (PATTERN (insn)) == pc_rtx
1972 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
1973 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
1974 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
1975 == loop_stack->data.loop.end_label)
1976 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
1977 == loop_stack->data.loop.alt_end_label)))
1978 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
1979 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
1980 == loop_stack->data.loop.end_label)
1981 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
1982 == loop_stack->data.loop.alt_end_label)))))
1983 last_test_insn = insn;
1985 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
1986 && GET_CODE (PATTERN (insn)) == SET
1987 && SET_DEST (PATTERN (insn)) == pc_rtx
1988 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
1989 && ((XEXP (SET_SRC (PATTERN (insn)), 0)
1990 == loop_stack->data.loop.end_label)
1991 || (XEXP (SET_SRC (PATTERN (insn)), 0)
1992 == loop_stack->data.loop.alt_end_label)))
1993 /* Include BARRIER. */
1994 last_test_insn = NEXT_INSN (insn);
1997 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
1999 /* We found one. Move everything from there up
2000 to the end of the loop, and add a jump into the loop
2001 to jump to there. */
2002 register rtx newstart_label = gen_label_rtx ();
2003 register rtx start_move = start_label;
2005 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2006 then we want to move this note also. */
2007 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2008 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2009 == NOTE_INSN_LOOP_CONT))
2010 start_move = PREV_INSN (start_move);
2012 emit_label_after (newstart_label, PREV_INSN (start_move));
2013 reorder_insns (start_move, last_test_insn, get_last_insn ());
2014 emit_jump_insn_after (gen_jump (start_label),
2015 PREV_INSN (newstart_label));
2016 emit_barrier_after (PREV_INSN (newstart_label));
2017 start_label = newstart_label;
2021 emit_jump (start_label);
2022 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2023 emit_label (loop_stack->data.loop.end_label);
2025 POPSTACK (loop_stack);
2027 last_expr_type = 0;
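/* Illustrative sketch, not part of the original source: the reordering
   above turns a loop laid out as

       start:  if (! cond) goto end;
               body
               goto start;
       end:

   into roughly

               goto start;
       top:    body
       start:  if (! cond) goto end;
               goto top;
       end:

   so that later jump optimization can fuse the exit test and the jump
   back into a single conditional branch per iteration.  */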
2030 /* Generate a jump to the current loop's continue-point.
2031 This is usually the top of the loop, but may be specified
2032 explicitly elsewhere. If not currently inside a loop,
2033 return 0 and do nothing; caller will print an error message. */
2035 int
2036 expand_continue_loop (whichloop)
2037 struct nesting *whichloop;
2039 last_expr_type = 0;
2040 if (whichloop == 0)
2041 whichloop = loop_stack;
2042 if (whichloop == 0)
2043 return 0;
2044 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2045 NULL_RTX);
2046 return 1;
2049 /* Generate a jump to exit the current loop. If not currently inside a loop,
2050 return 0 and do nothing; caller will print an error message. */
2052 int
2053 expand_exit_loop (whichloop)
2054 struct nesting *whichloop;
2056 last_expr_type = 0;
2057 if (whichloop == 0)
2058 whichloop = loop_stack;
2059 if (whichloop == 0)
2060 return 0;
2061 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2062 return 1;
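/* Illustrative sketch, not part of the original source: a C front end
   would typically expand

       continue;   as   expand_continue_loop (0)
       break;      as   expand_exit_loop (0)

   passing a null WHICHLOOP to mean the innermost loop, and itself
   reporting an error when the return value is 0 because there is no
   enclosing loop.  */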
2065 /* Generate a conditional jump to exit the current loop if COND
2066 evaluates to zero. If not currently inside a loop,
2067 return 0 and do nothing; caller will print an error message. */
2069 int
2070 expand_exit_loop_if_false (whichloop, cond)
2071 struct nesting *whichloop;
2072 tree cond;
2074 rtx label = gen_label_rtx ();
2075 rtx last_insn;
2076 last_expr_type = 0;
2078 if (whichloop == 0)
2079 whichloop = loop_stack;
2080 if (whichloop == 0)
2081 return 0;
2082 /* In order to handle fixups, we actually create a conditional jump
2083 around an unconditional branch to exit the loop. If fixups are
2084 necessary, they go before the unconditional branch. */
2087 do_jump (cond, NULL_RTX, label);
2088 last_insn = get_last_insn ();
2089 if (GET_CODE (last_insn) == CODE_LABEL)
2090 whichloop->data.loop.alt_end_label = last_insn;
2091 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2092 NULL_RTX);
2093 emit_label (label);
2095 return 1;
2098 /* Return non-zero if we should preserve sub-expressions as separate
2099 pseudos. We never do so if we aren't optimizing. We always do so
2100 if -fexpensive-optimizations.
2102 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2103 the loop may still be a small one. */
2105 int
2106 preserve_subexpressions_p ()
2108 rtx insn;
2110 if (flag_expensive_optimizations)
2111 return 1;
2113 if (optimize == 0 || loop_stack == 0)
2114 return 0;
2116 insn = get_last_insn_anywhere ();
2118 return (insn
2119 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2120 < n_non_fixed_regs * 3));
2124 /* Generate a jump to exit the current loop, conditional, binding contour
2125 or case statement. Not all such constructs are visible to this function,
2126 only those started with EXIT_FLAG nonzero. Individual languages use
2127 the EXIT_FLAG parameter to control which kinds of constructs you can
2128 exit this way.
2130 If not currently inside anything that can be exited,
2131 return 0 and do nothing; caller will print an error message. */
2133 int
2134 expand_exit_something ()
2136 struct nesting *n;
2137 last_expr_type = 0;
2138 for (n = nesting_stack; n; n = n->all)
2139 if (n->exit_label != 0)
2141 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2142 return 1;
2145 return 0;
2148 /* Generate RTL to return from the current function, with no value.
2149 (That is, we do not do anything about returning any value.) */
2151 void
2152 expand_null_return ()
2154 struct nesting *block = block_stack;
2155 rtx last_insn = 0;
2157 /* Does any pending block have cleanups? */
2159 while (block && block->data.block.cleanups == 0)
2160 block = block->next;
2162 /* If yes, use a goto to return, since that runs cleanups. */
2164 expand_null_return_1 (last_insn, block != 0);
2167 /* Generate RTL to return from the current function, with value VAL. */
2169 static void
2170 expand_value_return (val)
2171 rtx val;
2173 struct nesting *block = block_stack;
2174 rtx last_insn = get_last_insn ();
2175 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2177 /* Copy the value to the return location
2178 unless it's already there. */
2180 if (return_reg != val)
2182 #ifdef PROMOTE_FUNCTION_RETURN
2183 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2184 int unsignedp = TREE_UNSIGNED (type);
2185 enum machine_mode mode
2186 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2187 &unsignedp, 1);
2189 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2190 convert_move (return_reg, val, unsignedp);
2191 else
2192 #endif
2193 emit_move_insn (return_reg, val);
2195 if (GET_CODE (return_reg) == REG
2196 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2197 emit_insn (gen_rtx_USE (VOIDmode, return_reg));
2198 /* Handle calls that return values in multiple non-contiguous locations.
2199 The Irix 6 ABI has examples of this. */
2200 else if (GET_CODE (return_reg) == PARALLEL)
2202 int i;
2204 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2206 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2208 if (GET_CODE (x) == REG
2209 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2210 emit_insn (gen_rtx_USE (VOIDmode, x));
2214 /* Does any pending block have cleanups? */
2216 while (block && block->data.block.cleanups == 0)
2217 block = block->next;
2219 /* If yes, use a goto to return, since that runs cleanups.
2220 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2222 expand_null_return_1 (last_insn, block != 0);
2225 /* Output a return with no value. If LAST_INSN is nonzero,
2226 pretend that the return takes place after LAST_INSN.
2227 If USE_GOTO is nonzero then don't use a return instruction;
2228 go to the return label instead. This causes any cleanups
2229 of pending blocks to be executed normally. */
2231 static void
2232 expand_null_return_1 (last_insn, use_goto)
2233 rtx last_insn;
2234 int use_goto;
2236 rtx end_label = cleanup_label ? cleanup_label : return_label;
2238 clear_pending_stack_adjust ();
2239 do_pending_stack_adjust ();
2240 last_expr_type = 0;
2242 /* PCC-struct return always uses an epilogue. */
2243 if (current_function_returns_pcc_struct || use_goto)
2245 if (end_label == 0)
2246 end_label = return_label = gen_label_rtx ();
2247 expand_goto_internal (NULL_TREE, end_label, last_insn);
2248 return;
2251 /* Otherwise output a simple return-insn if one is available,
2252 unless it won't do the job. */
2253 #ifdef HAVE_return
2254 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2256 emit_jump_insn (gen_return ());
2257 emit_barrier ();
2258 return;
2260 #endif
2262 /* Otherwise jump to the epilogue. */
2263 expand_goto_internal (NULL_TREE, end_label, last_insn);
2266 /* Generate RTL to evaluate the expression RETVAL and return it
2267 from the current function. */
2269 void
2270 expand_return (retval)
2271 tree retval;
2273 /* If there are any cleanups to be performed, then they will
2274 be inserted following LAST_INSN. It is desirable
2275 that the last_insn, for such purposes, should be the
2276 last insn before computing the return value. Otherwise, cleanups
2277 which call functions can clobber the return value. */
2278 /* ??? rms: I think that is erroneous, because in C++ it would
2279 run destructors on variables that might be used in the subsequent
2280 computation of the return value. */
2281 rtx last_insn = 0;
2282 register rtx val = 0;
2283 register rtx op0;
2284 tree retval_rhs;
2285 int cleanups;
2287 /* If function wants no value, give it none. */
2288 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2290 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2291 emit_queue ();
2292 expand_null_return ();
2293 return;
2296 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2297 /* This is not sufficient. We also need to watch for cleanups of the
2298 expression we are about to expand. Unfortunately, we cannot know
2299 if it has cleanups until we expand it, and we want to change how we
2300 expand it depending upon if we need cleanups. We can't win. */
2301 #if 0
2302 cleanups = any_pending_cleanups (1);
2303 #else
2304 cleanups = 1;
2305 #endif
2307 if (TREE_CODE (retval) == RESULT_DECL)
2308 retval_rhs = retval;
2309 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2310 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2311 retval_rhs = TREE_OPERAND (retval, 1);
2312 else if (TREE_TYPE (retval) == void_type_node)
2313 /* Recognize tail-recursive call to void function. */
2314 retval_rhs = retval;
2315 else
2316 retval_rhs = NULL_TREE;
2318 /* Only use `last_insn' if there are cleanups which must be run. */
2319 if (cleanups || cleanup_label != 0)
2320 last_insn = get_last_insn ();
2322 /* Distribute return down conditional expr if either of the sides
2323 may involve tail recursion (see test below). This enhances the number
2324 of tail recursions we see. Don't do this always since it can produce
2325 sub-optimal code in some cases and we distribute assignments into
2326 conditional expressions when it would help. */
2328 if (optimize && retval_rhs != 0
2329 && frame_offset == 0
2330 && TREE_CODE (retval_rhs) == COND_EXPR
2331 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2332 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2334 rtx label = gen_label_rtx ();
2335 tree expr;
2337 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2338 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2339 DECL_RESULT (current_function_decl),
2340 TREE_OPERAND (retval_rhs, 1));
2341 TREE_SIDE_EFFECTS (expr) = 1;
2342 expand_return (expr);
2343 emit_label (label);
2345 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2346 DECL_RESULT (current_function_decl),
2347 TREE_OPERAND (retval_rhs, 2));
2348 TREE_SIDE_EFFECTS (expr) = 1;
2349 expand_return (expr);
2350 return;
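/* Illustrative sketch, not part of the original source: the
   distribution above rewrites

       return c ? f (x) : g (y);

   as if the source had been

       if (c) return f (x); else return g (y);

   so that each arm becomes a plain `return' of a CALL_EXPR and the
   tail-recursion test below gets a chance at both calls.  */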
2353 /* For tail-recursive call to current function,
2354 just jump back to the beginning.
2355 It's unsafe if any auto variable in this function
2356 has its address taken; for simplicity,
2357 require stack frame to be empty. */
2358 if (optimize && retval_rhs != 0
2359 && frame_offset == 0
2360 && TREE_CODE (retval_rhs) == CALL_EXPR
2361 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2362 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2363 /* Finish checking validity, and if valid emit code
2364 to set the argument variables for the new call. */
2365 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2366 DECL_ARGUMENTS (current_function_decl)))
2368 if (tail_recursion_label == 0)
2370 tail_recursion_label = gen_label_rtx ();
2371 emit_label_after (tail_recursion_label,
2372 tail_recursion_reentry);
2374 emit_queue ();
2375 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2376 emit_barrier ();
2377 return;
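/* Illustrative sketch, not part of the original source: when the
   conditions above hold, a self-call in tail position such as

       int count (n, acc) int n, acc;
       { return n == 0 ? acc : count (n - 1, acc + 1); }

   compiles into moves of the new argument values into the formals
   (emitted by tail_recursion_args, defined below) followed by a jump
   back to tail_recursion_label, i.e. into a loop with no call.  */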
2379 #ifdef HAVE_return
2380 /* This optimization is safe if there are local cleanups
2381 because expand_null_return takes care of them.
2382 ??? I think it should also be safe when there is a cleanup label,
2383 because expand_null_return takes care of them, too.
2384 Any reason why not? */
2385 if (HAVE_return && cleanup_label == 0
2386 && ! current_function_returns_pcc_struct
2387 && BRANCH_COST <= 1)
2389 /* If this is return x == y; then generate
2390 if (x == y) return 1; else return 0;
2391 if we can do it with explicit return insns and branches are cheap,
2392 but not if we have the corresponding scc insn. */
2393 int has_scc = 0;
2394 if (retval_rhs)
2395 switch (TREE_CODE (retval_rhs))
2397 case EQ_EXPR:
2398 #ifdef HAVE_seq
2399 has_scc = HAVE_seq;
2400 #endif
2401 case NE_EXPR:
2402 #ifdef HAVE_sne
2403 has_scc = HAVE_sne;
2404 #endif
2405 case GT_EXPR:
2406 #ifdef HAVE_sgt
2407 has_scc = HAVE_sgt;
2408 #endif
2409 case GE_EXPR:
2410 #ifdef HAVE_sge
2411 has_scc = HAVE_sge;
2412 #endif
2413 case LT_EXPR:
2414 #ifdef HAVE_slt
2415 has_scc = HAVE_slt;
2416 #endif
2417 case LE_EXPR:
2418 #ifdef HAVE_sle
2419 has_scc = HAVE_sle;
2420 #endif
2421 case TRUTH_ANDIF_EXPR:
2422 case TRUTH_ORIF_EXPR:
2423 case TRUTH_AND_EXPR:
2424 case TRUTH_OR_EXPR:
2425 case TRUTH_NOT_EXPR:
2426 case TRUTH_XOR_EXPR:
2427 if (! has_scc)
2429 op0 = gen_label_rtx ();
2430 jumpifnot (retval_rhs, op0);
2431 expand_value_return (const1_rtx);
2432 emit_label (op0);
2433 expand_value_return (const0_rtx);
2434 return;
2436 break;
2438 default:
2439 break;
2442 #endif /* HAVE_return */
2444 /* If the result is an aggregate that is being returned in one (or more)
2445 registers, load the registers here. The compiler currently can't handle
2446 copying a BLKmode value into registers. We could put this code in a
2447 more general area (for use by everyone instead of just function
2448 call/return), but until this feature is generally usable it is kept here
2449 (and in expand_call). The value must go into a pseudo in case there
2450 are cleanups that will clobber the real return register. */
2452 if (retval_rhs != 0
2453 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2454 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2456 int i, bitpos, xbitpos;
2457 int big_endian_correction = 0;
2458 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2459 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2460 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
2461 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2462 rtx result_reg, src, dst;
2463 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2464 enum machine_mode tmpmode, result_reg_mode;
2466 /* Structures whose size is not a multiple of a word are aligned
2467 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2468 machine, this means we must skip the empty high order bytes when
2469 calculating the bit offset. */
2470 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2471 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2472 * BITS_PER_UNIT));
2474 /* Copy the structure BITSIZE bits at a time. */
2475 for (bitpos = 0, xbitpos = big_endian_correction;
2476 bitpos < bytes * BITS_PER_UNIT;
2477 bitpos += bitsize, xbitpos += bitsize)
2479 /* We need a new destination pseudo each time xbitpos is
2480 on a word boundary and when xbitpos == big_endian_correction
2481 (the first time through). */
2482 if (xbitpos % BITS_PER_WORD == 0
2483 || xbitpos == big_endian_correction)
2485 /* Generate an appropriate register. */
2486 dst = gen_reg_rtx (word_mode);
2487 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2489 /* Clobber the destination before we move anything into it. */
2490 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
2493 /* We need a new source operand each time bitpos is on a word
2494 boundary. */
2495 if (bitpos % BITS_PER_WORD == 0)
2496 src = operand_subword_force (result_val,
2497 bitpos / BITS_PER_WORD,
2498 BLKmode);
2500 /* Use bitpos for the source extraction (left justified) and
2501 xbitpos for the destination store (right justified). */
2502 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2503 extract_bit_field (src, bitsize,
2504 bitpos % BITS_PER_WORD, 1,
2505 NULL_RTX, word_mode,
2506 word_mode,
2507 bitsize / BITS_PER_UNIT,
2508 BITS_PER_WORD),
2509 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2512 /* Find the smallest integer mode large enough to hold the
2513 entire structure and use that mode instead of BLKmode
2514 on the USE insn for the return register. */
2515 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2516 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2517 tmpmode != MAX_MACHINE_MODE;
2518 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2520 /* Have we found a large enough mode? */
2521 if (GET_MODE_SIZE (tmpmode) >= bytes)
2522 break;
2525 /* No suitable mode found. */
2526 if (tmpmode == MAX_MACHINE_MODE)
2527 abort ();
2529 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2531 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2532 result_reg_mode = word_mode;
2533 else
2534 result_reg_mode = tmpmode;
2535 result_reg = gen_reg_rtx (result_reg_mode);
2537 emit_queue ();
2538 for (i = 0; i < n_regs; i++)
2539 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2540 result_pseudos[i]);
2542 if (tmpmode != result_reg_mode)
2543 result_reg = gen_lowpart (tmpmode, result_reg);
2545 expand_value_return (result_reg);
2547 else if (cleanups
2548 && retval_rhs != 0
2549 && TREE_TYPE (retval_rhs) != void_type_node
2550 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2552 /* Calculate the return value into a pseudo reg. */
2553 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
2554 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2555 val = force_not_mem (val);
2556 emit_queue ();
2557 /* Return the calculated value, doing cleanups first. */
2558 expand_value_return (val);
2560 else
2562 /* No cleanups or no hard reg used;
2563 calculate value into hard return reg. */
2564 expand_expr (retval, const0_rtx, VOIDmode, 0);
2565 emit_queue ();
2566 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
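/* Illustrative sketch, not part of the original source: for something
   like

       struct s { char c[6]; };
       struct s f () { struct s v; ... return v; }

   on a target whose ABI returns such a BLKmode value in registers, the
   code above copies V into word_mode pseudos bit-field by bit-field
   (with a shift correction for the final partial word on big-endian
   machines), then assembles them into one register of the narrowest
   integer mode holding all 6 bytes before calling expand_value_return.  */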
2570 /* Return 1 if the end of the generated RTX is not a barrier.
2571 This means code already compiled can drop through. */
2573 int
2574 drop_through_at_end_p ()
2576 rtx insn = get_last_insn ();
2577 while (insn && GET_CODE (insn) == NOTE)
2578 insn = PREV_INSN (insn);
2579 return insn && GET_CODE (insn) != BARRIER;
2582 /* Emit code to alter this function's formal parms for a tail-recursive call.
2583 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2584 FORMALS is the chain of decls of formals.
2585 Return 1 if this can be done;
2586 otherwise return 0 and do not emit any code. */
2588 static int
2589 tail_recursion_args (actuals, formals)
2590 tree actuals, formals;
2592 register tree a = actuals, f = formals;
2593 register int i;
2594 register rtx *argvec;
2596 /* Check that number and types of actuals are compatible
2597 with the formals. This is not always true in valid C code.
2598 Also check that no formal needs to be addressable
2599 and that all formals are scalars. */
2601 /* Also count the args. */
2603 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2605 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2606 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
2607 return 0;
2608 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2609 return 0;
2611 if (a != 0 || f != 0)
2612 return 0;
2614 /* Compute all the actuals. */
2616 argvec = (rtx *) alloca (i * sizeof (rtx));
2618 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2619 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2621 /* Find which actual values refer to current values of previous formals.
2622 Copy each of them now, before any formal is changed. */
2624 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2626 int copy = 0;
2627 register int j;
2628 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2629 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2630 { copy = 1; break; }
2631 if (copy)
2632 argvec[i] = copy_to_reg (argvec[i]);
2635 /* Store the values of the actuals into the formals. */
2637 for (f = formals, a = actuals, i = 0; f;
2638 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2640 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2641 emit_move_insn (DECL_RTL (f), argvec[i]);
2642 else
2643 convert_move (DECL_RTL (f), argvec[i],
2644 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2647 free_temp_slots ();
2648 return 1;
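/* Illustrative sketch, not part of the original source: the copying
   pass above exists because the stores into the formals are emitted in
   order.  Given

       int f (a, b) int a, b;
       { ... return f (b, a); ... }

   the actual for the second formal still mentions formal `a', so its
   value is copied to a fresh register before `a' is overwritten with
   `b'; without the copy, the second store would read the clobbered
   value.  */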
2651 /* Generate the RTL code for entering a binding contour.
2652 The variables are declared one by one, by calls to `expand_decl'.
2654 EXIT_FLAG is nonzero if this construct should be visible to
2655 `exit_something'. */
2657 void
2658 expand_start_bindings (exit_flag)
2659 int exit_flag;
2661 struct nesting *thisblock = ALLOC_NESTING ();
2662 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2664 /* Make an entry on block_stack for the block we are entering. */
2666 thisblock->next = block_stack;
2667 thisblock->all = nesting_stack;
2668 thisblock->depth = ++nesting_depth;
2669 thisblock->data.block.stack_level = 0;
2670 thisblock->data.block.cleanups = 0;
2671 thisblock->data.block.function_call_count = 0;
2672 thisblock->data.block.exception_region = 0;
2673 thisblock->data.block.target_temp_slot_level = target_temp_slot_level;
2675 thisblock->data.block.conditional_code = 0;
2676 thisblock->data.block.last_unconditional_cleanup = note;
2677 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
2679 if (block_stack
2680 && !(block_stack->data.block.cleanups == NULL_TREE
2681 && block_stack->data.block.outer_cleanups == NULL_TREE))
2682 thisblock->data.block.outer_cleanups
2683 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2684 block_stack->data.block.outer_cleanups);
2685 else
2686 thisblock->data.block.outer_cleanups = 0;
2687 thisblock->data.block.label_chain = 0;
2688 thisblock->data.block.innermost_stack_block = stack_block_stack;
2689 thisblock->data.block.first_insn = note;
2690 thisblock->data.block.block_start_count = ++block_start_count;
2691 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2692 block_stack = thisblock;
2693 nesting_stack = thisblock;
2695 /* Make a new level for allocating stack slots. */
2696 push_temp_slots ();
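/* Illustrative sketch, not part of the original source: a braced block
   such as

       { int x = g (); ... }

   is bracketed by a front end roughly as

       expand_start_bindings (0);
       expand_decl (x);
       expand_decl_init (x);
       ... expand the statements ...
       expand_end_bindings (decls, 1, 0);

   using expand_decl, expand_decl_init and expand_end_bindings defined
   later in this file.  */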
2699 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2700 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
2701 expand_expr are made. After we end the region, we know that all
2702 space for all temporaries that were created by TARGET_EXPRs will be
2703 destroyed and their space freed for reuse. */
2705 void
2706 expand_start_target_temps ()
2708 /* This is so that even if the result is preserved, the space
2709 allocated will be freed, as we know that it is no longer in use. */
2710 push_temp_slots ();
2712 /* Start a new binding layer that will keep track of all cleanup
2713 actions to be performed. */
2714 expand_start_bindings (0);
2716 target_temp_slot_level = temp_slot_level;
2719 void
2720 expand_end_target_temps ()
2722 expand_end_bindings (NULL_TREE, 0, 0);
2724 /* This is so that even if the result is preserved, the space
2725 allocated will be freed, as we know that it is no longer in use. */
2726 pop_temp_slots ();
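/* Illustrative sketch, not part of the original source: these two
   functions are used as a bracketing pair around the expansion of a
   statement whose TARGET_EXPR temporaries must not survive it:

       expand_start_target_temps ();
       ... expand the statement ...
       expand_end_target_temps ();

   so that the temporaries' stack slots are freed again at the closing
   call.  */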
2729 /* Mark top block of block_stack as an implicit binding for an
2730 exception region. This is used to prevent infinite recursion when
2731 ending a binding with expand_end_bindings. It is only ever called
2732 by expand_eh_region_start, as that is the only way to create a
2733 block stack for an exception region. */
2735 void
2736 mark_block_as_eh_region ()
2738 block_stack->data.block.exception_region = 1;
2739 if (block_stack->next
2740 && block_stack->next->data.block.conditional_code)
2742 block_stack->data.block.conditional_code
2743 = block_stack->next->data.block.conditional_code;
2744 block_stack->data.block.last_unconditional_cleanup
2745 = block_stack->next->data.block.last_unconditional_cleanup;
2746 block_stack->data.block.cleanup_ptr
2747 = block_stack->next->data.block.cleanup_ptr;
2751 /* True if we are currently emitting insns in an area of output code
2752 that is controlled by a conditional expression. This is used by
2753 the cleanup handling code to generate conditional cleanup actions. */
2755 int
2756 conditional_context ()
2758 return block_stack && block_stack->data.block.conditional_code;
2761 /* Mark top block of block_stack as not for an implicit binding for an
2762 exception region. This is only ever done by expand_eh_region_end
2763 to let expand_end_bindings know that it is being called explicitly
2764 to end the binding layer for just the binding layer associated with
2765 the exception region; otherwise expand_end_bindings would try to
2766 end all implicit binding layers for exception regions, and then
2767 one normal binding layer. */
2769 void
2770 mark_block_as_not_eh_region ()
2772 block_stack->data.block.exception_region = 0;
2775 /* True if the top block of block_stack was marked as for an exception
2776 region by mark_block_as_eh_region. */
2778 int
2779 is_eh_region ()
2781 return block_stack && block_stack->data.block.exception_region;
2784 /* Given a pointer to a BLOCK node, save a pointer to the most recently
2785 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
2786 BLOCK node. */
2788 void
2789 remember_end_note (block)
2790 register tree block;
2792 BLOCK_END_NOTE (block) = last_block_end_note;
2793 last_block_end_note = NULL_RTX;
2796 /* Generate RTL code to terminate a binding contour.
2797 VARS is the chain of VAR_DECL nodes
2798 for the variables bound in this contour.
2799 MARK_ENDS is nonzero if we should put a note at the beginning
2800 and end of this binding contour.
2802 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2803 (That is true automatically if the contour has a saved stack level.) */
2805 void
2806 expand_end_bindings (vars, mark_ends, dont_jump_in)
2807 tree vars;
2808 int mark_ends;
2809 int dont_jump_in;
2811 register struct nesting *thisblock;
2812 register tree decl;
2814 while (block_stack->data.block.exception_region)
2816 /* Because we don't need or want a new temporary level and
2817 because we didn't create one in expand_eh_region_start,
2818 create a fake one now to avoid removing one in
2819 expand_end_bindings. */
2820 push_temp_slots ();
2822 block_stack->data.block.exception_region = 0;
2824 expand_end_bindings (NULL_TREE, 0, 0);
2827 /* Since expand_eh_region_start does an expand_start_bindings, we
2828 have to first end all the bindings that were created by
2829 expand_eh_region_start. */
2831 thisblock = block_stack;
2833 if (warn_unused)
2834 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2835 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
2836 && ! DECL_IN_SYSTEM_HEADER (decl)
2837 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
2838 warning_with_decl (decl, "unused variable `%s'");
2840 if (thisblock->exit_label)
2842 do_pending_stack_adjust ();
2843 emit_label (thisblock->exit_label);
2846 /* If necessary, make a handler for nonlocal gotos taking
2847 place in the function calls in this block. */
2848 if (function_call_count != thisblock->data.block.function_call_count
2849 && nonlocal_labels
2850 /* Make handler for outermost block
2851 if there were any nonlocal gotos to this function. */
2852 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2853 /* Make handler for inner block if it has something
2854 special to do when you jump out of it. */
2855 : (thisblock->data.block.cleanups != 0
2856 || thisblock->data.block.stack_level != 0)))
2858 tree link;
2859 rtx afterward = gen_label_rtx ();
2860 rtx handler_label = gen_label_rtx ();
2861 rtx save_receiver = gen_reg_rtx (Pmode);
2862 rtx insns;
2864 /* Don't let jump_optimize delete the handler. */
2865 LABEL_PRESERVE_P (handler_label) = 1;
2867 /* Record the handler address in the stack slot for that purpose,
2868 during this block, saving and restoring the outer value. */
2869 if (thisblock->next != 0)
2871 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2873 start_sequence ();
2874 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
2875 insns = get_insns ();
2876 end_sequence ();
2877 emit_insns_before (insns, thisblock->data.block.first_insn);
2880 start_sequence ();
2881 emit_move_insn (nonlocal_goto_handler_slot,
2882 gen_rtx_LABEL_REF (Pmode, handler_label));
2883 insns = get_insns ();
2884 end_sequence ();
2885 emit_insns_before (insns, thisblock->data.block.first_insn);
2887 /* Jump around the handler; it runs only when specially invoked. */
2888 emit_jump (afterward);
2889 emit_label (handler_label);
2891 #ifdef HAVE_nonlocal_goto
2892 if (! HAVE_nonlocal_goto)
2893 #endif
2894 /* First adjust our frame pointer to its actual value. It was
2895 previously set to the start of the virtual area corresponding to
2896 the stacked variables when we branched here and now needs to be
2897 adjusted to the actual hardware fp value.
2899 Assignments to virtual registers are converted by
2900 instantiate_virtual_regs into the corresponding assignment
2901 to the underlying register (fp in this case) that makes
2902 the original assignment true.
2903 So the following insn will actually be
2904 decrementing fp by STARTING_FRAME_OFFSET. */
2905 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
2907 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2908 if (fixed_regs[ARG_POINTER_REGNUM])
2910 #ifdef ELIMINABLE_REGS
2911 /* If the argument pointer can be eliminated in favor of the
2912 frame pointer, we don't need to restore it. We assume here
2913 that if such an elimination is present, it can always be used.
2914 This is the case on all known machines; if we don't make this
2915 assumption, we do unnecessary saving on many machines. */
2916 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
2917 int i;
2919 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
2920 if (elim_regs[i].from == ARG_POINTER_REGNUM
2921 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
2922 break;
2924 if (i == sizeof elim_regs / sizeof elim_regs [0])
2925 #endif
2927 /* Now restore our arg pointer from the address at which it
2928 was saved in our stack frame.
2929 If space hasn't been allocated for it yet, make
2930 some now. */
2931 if (arg_pointer_save_area == 0)
2932 arg_pointer_save_area
2933 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
2934 emit_move_insn (virtual_incoming_args_rtx,
2935 /* We need a pseudo here, or else
2936 instantiate_virtual_regs_1 complains. */
2937 copy_to_reg (arg_pointer_save_area));
2940 #endif
2942 #ifdef HAVE_nonlocal_goto_receiver
2943 if (HAVE_nonlocal_goto_receiver)
2944 emit_insn (gen_nonlocal_goto_receiver ());
2945 #endif
2947 /* The handler expects the desired label address in the static chain
2948 register. It tests the address and does an appropriate jump
2949 to whatever label is desired. */
2950 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
2951 /* Skip any labels we shouldn't be able to jump to from here. */
2952 if (! DECL_TOO_LATE (TREE_VALUE (link)))
2954 rtx not_this = gen_label_rtx ();
2955 rtx this = gen_label_rtx ();
2956 do_jump_if_equal (static_chain_rtx,
2957 gen_rtx_LABEL_REF (Pmode, DECL_RTL (TREE_VALUE (link))),
2958 this, 0);
2959 emit_jump (not_this);
2960 emit_label (this);
2961 expand_goto (TREE_VALUE (link));
2962 emit_label (not_this);
2964 /* If label is not recognized, abort. */
2965 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
2966 VOIDmode, 0);
2967 emit_barrier ();
2968 emit_label (afterward);
2971 /* Don't allow jumping into a block that has a stack level.
2972 Cleanups are allowed, though. */
2973 if (dont_jump_in
2974 || thisblock->data.block.stack_level != 0)
2976 struct label_chain *chain;
2978 /* Any labels in this block are no longer valid to go to.
2979 Mark them to cause an error message. */
2980 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2982 DECL_TOO_LATE (chain->label) = 1;
2983 /* If any goto without a fixup came to this label,
2984 that must be an error, because gotos without fixups
2985 come from outside all saved stack-levels. */
2986 if (TREE_ADDRESSABLE (chain->label))
2987 error_with_decl (chain->label,
2988 "label `%s' used before containing binding contour");
2992 /* Restore stack level in effect before the block
2993 (only if variable-size objects allocated). */
2994 /* Perform any cleanups associated with the block. */
2996 if (thisblock->data.block.stack_level != 0
2997 || thisblock->data.block.cleanups != 0)
2999 /* Only clean up here if this point can actually be reached. */
3000 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3002 /* Don't let cleanups affect ({...}) constructs. */
3003 int old_expr_stmts_for_value = expr_stmts_for_value;
3004 rtx old_last_expr_value = last_expr_value;
3005 tree old_last_expr_type = last_expr_type;
3006 expr_stmts_for_value = 0;
3008 /* Do the cleanups. */
3009 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3010 if (reachable)
3011 do_pending_stack_adjust ();
3013 expr_stmts_for_value = old_expr_stmts_for_value;
3014 last_expr_value = old_last_expr_value;
3015 last_expr_type = old_last_expr_type;
3017 /* Restore the stack level. */
3019 if (reachable && thisblock->data.block.stack_level != 0)
3021 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3022 thisblock->data.block.stack_level, NULL_RTX);
3023 if (nonlocal_goto_handler_slot != 0)
3024 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3025 NULL_RTX);
3028 /* Any gotos out of this block must also do these things.
3029 Also report any gotos with fixups that came to labels in this
3030 level. */
3031 fixup_gotos (thisblock,
3032 thisblock->data.block.stack_level,
3033 thisblock->data.block.cleanups,
3034 thisblock->data.block.first_insn,
3035 dont_jump_in);
3038 /* Mark the beginning and end of the scope if requested.
3039 We do this now, after running cleanups on the variables
3040 just going out of scope, so they are in scope for their cleanups. */
3042 if (mark_ends)
3043 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3044 else
3045 /* Get rid of the beginning-mark if we don't make an end-mark. */
3046 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3048 /* If doing stupid register allocation, make sure lives of all
3049 register variables declared here extend thru end of scope. */
3051 if (obey_regdecls)
3052 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3054 rtx rtl = DECL_RTL (decl);
3055 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3056 use_variable (rtl);
3059 /* Restore the temporary level of TARGET_EXPRs. */
3060 target_temp_slot_level = thisblock->data.block.target_temp_slot_level;
3062 /* Restore block_stack level for containing block. */
3064 stack_block_stack = thisblock->data.block.innermost_stack_block;
3065 POPSTACK (block_stack);
3067 /* Pop the stack slot nesting and free any slots at this level. */
3068 pop_temp_slots ();
3073 /* Generate RTL for the automatic variable declaration DECL.
3074 (Other kinds of declarations are simply ignored if seen here.) */
3076 void
3077 expand_decl (decl)
3078 register tree decl;
3080 struct nesting *thisblock = block_stack;
3081 tree type;
3083 type = TREE_TYPE (decl);
3085 /* Only automatic variables need any expansion done.
3086 Static and external variables, and external functions,
3087 will be handled by `assemble_variable' (called from finish_decl).
3088 TYPE_DECL and CONST_DECL require nothing.
3089 PARM_DECLs are handled in `assign_parms'. */
3091 if (TREE_CODE (decl) != VAR_DECL)
3092 return;
3093 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3094 return;
3096 /* Create the RTL representation for the variable. */
3098 if (type == error_mark_node)
3099 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3100 else if (DECL_SIZE (decl) == 0)
3101 /* Variable with incomplete type. */
3103 if (DECL_INITIAL (decl) == 0)
3104 /* Error message was already done; now avoid a crash. */
3105 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3106 else
3107 /* An initializer is going to decide the size of this array.
3108 Until we know the size, represent its address with a reg. */
3109 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3110 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
3112 else if (DECL_MODE (decl) != BLKmode
3113 /* If -ffloat-store, don't put explicit float vars
3114 into regs. */
3115 && !(flag_float_store
3116 && TREE_CODE (type) == REAL_TYPE)
3117 && ! TREE_THIS_VOLATILE (decl)
3118 && ! TREE_ADDRESSABLE (decl)
3119 && (DECL_REGISTER (decl) || ! obey_regdecls))
3121 /* Automatic variable that can go in a register. */
3122 int unsignedp = TREE_UNSIGNED (type);
3123 enum machine_mode reg_mode
3124 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3126 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3127 mark_user_reg (DECL_RTL (decl));
3129 if (TREE_CODE (type) == POINTER_TYPE)
3130 mark_reg_pointer (DECL_RTL (decl),
3131 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3132 / BITS_PER_UNIT));
3135 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3136 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3137 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3138 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3139 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
3141 /* Variable of fixed size that goes on the stack. */
3142 rtx oldaddr = 0;
3143 rtx addr;
3145 /* If we previously made RTL for this decl, it must be an array
3146 whose size was determined by the initializer.
3147 The old address was a register; set that register now
3148 to the proper address. */
3149 if (DECL_RTL (decl) != 0)
3151 if (GET_CODE (DECL_RTL (decl)) != MEM
3152 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3153 abort ();
3154 oldaddr = XEXP (DECL_RTL (decl), 0);
3157 DECL_RTL (decl)
3158 = assign_stack_temp (DECL_MODE (decl),
3159 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3160 + BITS_PER_UNIT - 1)
3161 / BITS_PER_UNIT),
3162 1);
3163 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3165 /* Set alignment we actually gave this decl. */
3166 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3167 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3169 if (oldaddr)
3171 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3172 if (addr != oldaddr)
3173 emit_move_insn (oldaddr, addr);
3176 /* If this is a memory ref that contains aggregate components,
3177 mark it as such for cse and loop optimize. */
3178 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3179 #if 0
3180 /* If this is in memory because of -ffloat-store,
3181 set the volatile bit, to prevent optimizations from
3182 undoing the effects. */
3183 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3184 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3185 #endif
3187 else
3188 /* Dynamic-size object: must push space on the stack. */
3190 rtx address, size;
3192 /* Record the stack pointer on entry to block, if have
3193 not already done so. */
3194 if (thisblock->data.block.stack_level == 0)
3196 do_pending_stack_adjust ();
3197 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3198 &thisblock->data.block.stack_level,
3199 thisblock->data.block.first_insn);
3200 stack_block_stack = thisblock;
3203 /* Compute the variable's size, in bytes. */
3204 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3205 DECL_SIZE (decl),
3206 size_int (BITS_PER_UNIT)),
3207 NULL_RTX, VOIDmode, 0);
3208 free_temp_slots ();
3210 /* Allocate space on the stack for the variable. Note that
3211 DECL_ALIGN says how the variable is to be aligned and we
3212 cannot use it to conclude anything about the alignment of
3213 the size. */
3214 address = allocate_dynamic_stack_space (size, NULL_RTX,
3215 TYPE_ALIGN (TREE_TYPE (decl)));
3217 /* Reference the variable indirect through that rtx. */
3218 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3220 /* If this is a memory ref that contains aggregate components,
3221 mark it as such for cse and loop optimize. */
3222 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3224 /* Indicate the alignment we actually gave this variable. */
3225 #ifdef STACK_BOUNDARY
3226 DECL_ALIGN (decl) = STACK_BOUNDARY;
3227 #else
3228 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3229 #endif
3232 if (TREE_THIS_VOLATILE (decl))
3233 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3234 #if 0 /* A variable is not necessarily unchanging
3235 just because it is const. RTX_UNCHANGING_P
3236 means no change in the function,
3237 not merely no change in the variable's scope.
3238 It is correct to set RTX_UNCHANGING_P if the variable's scope
3239 is the whole function. There's no convenient way to test that. */
3240 if (TREE_READONLY (decl))
3241 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3242 #endif
3244 /* If doing stupid register allocation, make sure life of any
3245 register variable starts here, at the start of its scope. */
3247 if (obey_regdecls)
3248 use_variable (DECL_RTL (decl));
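/* Illustrative sketch, not part of the original source: the cases
   above give, for example,

       int i;             a pseudo register, when eligible
       volatile int v;    a stack slot, marked MEM_VOLATILE_P above
       char buf[n];       a dynamic allocation through
                          allocate_dynamic_stack_space, with the stack
                          pointer saved at block entry so it can be
                          restored at block exit

   assuming a C front end.  */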
3253 /* Emit code to perform the initialization of a declaration DECL. */
3255 void
3256 expand_decl_init (decl)
3257 tree decl;
3259 int was_used = TREE_USED (decl);
3261 /* If this is a CONST_DECL, we don't have to generate any code, but
3262 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3263 to be set while in the obstack containing the constant. If we don't
3264 do this, we can lose if we have functions nested three deep and the middle
3265 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3266 the innermost function is the first to expand that STRING_CST. */
3267 if (TREE_CODE (decl) == CONST_DECL)
3269 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3270 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3271 EXPAND_INITIALIZER);
3272 return;
3275 if (TREE_STATIC (decl))
3276 return;
3278 /* Compute and store the initial value now. */
3280 if (DECL_INITIAL (decl) == error_mark_node)
3282 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3283 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3284 || code == POINTER_TYPE)
3285 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3286 0, 0);
3287 emit_queue ();
3289 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3291 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3292 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3293 emit_queue ();
3296 /* Don't let the initialization count as "using" the variable. */
3297 TREE_USED (decl) = was_used;
3299 /* Free any temporaries we made while initializing the decl. */
3300 preserve_temp_slots (NULL_RTX);
3301 free_temp_slots ();
3304 /* CLEANUP is an expression to be executed at exit from this binding contour;
3305 for example, in C++, it might call the destructor for this variable.
3307 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3308 CLEANUP multiple times, and have the correct semantics. This
3309 happens in exception handling, for gotos, returns, breaks that
3310 leave the current scope.
3312 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3313 that is not associated with any particular variable. */
3315 int
3316 expand_decl_cleanup (decl, cleanup)
3317 tree decl, cleanup;
3319 struct nesting *thisblock = block_stack;
3321 /* Error if we are not in any block. */
3322 if (thisblock == 0)
3323 return 0;
3325 /* Record the cleanup if there is one. */
3327 if (cleanup != 0)
3329 tree t;
3330 rtx seq;
3331 tree *cleanups = &thisblock->data.block.cleanups;
3332 int cond_context = conditional_context ();
3334 if (cond_context)
3336 rtx flag = gen_reg_rtx (word_mode);
3337 rtx set_flag_0;
3338 tree cond;
3340 start_sequence ();
3341 emit_move_insn (flag, const0_rtx);
3342 set_flag_0 = get_insns ();
3343 end_sequence ();
3345 thisblock->data.block.last_unconditional_cleanup
3346 = emit_insns_after (set_flag_0,
3347 thisblock->data.block.last_unconditional_cleanup);
3349 emit_move_insn (flag, const1_rtx);
3351 /* All cleanups must be on the function_obstack. */
3352 push_obstacks_nochange ();
3353 resume_temporary_allocation ();
3355 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3356 DECL_RTL (cond) = flag;
3358 /* Conditionalize the cleanup. */
3359 cleanup = build (COND_EXPR, void_type_node,
3360 truthvalue_conversion (cond),
3361 cleanup, integer_zero_node);
3362 cleanup = fold (cleanup);
3364 pop_obstacks ();
3366 cleanups = thisblock->data.block.cleanup_ptr;
3369 /* All cleanups must be on the function_obstack. */
3370 push_obstacks_nochange ();
3371 resume_temporary_allocation ();
3372 cleanup = unsave_expr (cleanup);
3373 pop_obstacks ();
3375 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
3377 if (! cond_context)
3378 /* If this block has a cleanup, it belongs in stack_block_stack. */
3379 stack_block_stack = thisblock;
3381 if (cond_context)
3383 start_sequence ();
3386 /* If this was optimized so that there is no exception region for the
3387 cleanup, then mark the TREE_LIST node, so that we can later tell
3388 if we need to call expand_eh_region_end. */
3389 if (! using_eh_for_cleanups_p
3390 || expand_eh_region_start_tree (decl, cleanup))
3391 TREE_ADDRESSABLE (t) = 1;
3392 /* If that started a new EH region, we're in a new block. */
3393 thisblock = block_stack;
3395 if (cond_context)
3397 seq = get_insns ();
3398 end_sequence ();
3399 if (seq)
3400 thisblock->data.block.last_unconditional_cleanup
3401 = emit_insns_after (seq,
3402 thisblock->data.block.last_unconditional_cleanup);
3404 else
3406 thisblock->data.block.last_unconditional_cleanup
3407 = get_last_insn ();
3408 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3411 return 1;
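/* Illustrative sketch, not part of the original source: for a C++
   declaration `X x;' where X has a destructor, a front end would call

       expand_decl (x_decl);
       expand_decl_cleanup (x_decl, <tree for the destructor call>);

   after which every exit from the contour, including gotos and returns
   that leave it, runs the destructor through expand_cleanups.  */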
3414 /* Like expand_decl_cleanup, but suppress generating an exception handler
3415 to perform the cleanup. */
3417 int
3418 expand_decl_cleanup_no_eh (decl, cleanup)
3419 tree decl, cleanup;
3421 int save_eh = using_eh_for_cleanups_p;
3422 int result;
3424 using_eh_for_cleanups_p = 0;
3425 result = expand_decl_cleanup (decl, cleanup);
3426 using_eh_for_cleanups_p = save_eh;
3428 return result;
3431 /* Arrange for the top element of the dynamic cleanup chain to be
3432 popped if we exit the current binding contour. DECL is the
3433 associated declaration, if any, otherwise NULL_TREE. If the
3434 current contour is left via an exception, then __sjthrow will pop
3435 the top element off the dynamic cleanup chain. The code that
3436 avoids doing the action we push into the cleanup chain in the
3437 exceptional case is contained in expand_cleanups.
3439 This routine is only used by expand_eh_region_start, and that is
3440 the only way in which an exception region should be started. This
3441 routine is only used when using the setjmp/longjmp codegen method
3442 for exception handling. */
3444 int
3445 expand_dcc_cleanup (decl)
3446 tree decl;
3448 struct nesting *thisblock = block_stack;
3449 tree cleanup;
3451 /* Error if we are not in any block. */
3452 if (thisblock == 0)
3453 return 0;
3455 /* Record the cleanup for the dynamic handler chain. */
3457 /* All cleanups must be on the function_obstack. */
3458 push_obstacks_nochange ();
3459 resume_temporary_allocation ();
3460 cleanup = make_node (POPDCC_EXPR);
3461 pop_obstacks ();
3463 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3464 thisblock->data.block.cleanups
3465 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3467 /* If this block has a cleanup, it belongs in stack_block_stack. */
3468 stack_block_stack = thisblock;
3469 return 1;
3472 /* Arrange for the top element of the dynamic handler chain to be
3473 popped if we exit the current binding contour. DECL is the
3474 associated declaration, if any, otherwise NULL_TREE. If the current
3475 contour is left via an exception, then __sjthrow will pop the top
3476 element off the dynamic handler chain. The code that avoids doing
3477 the action we push into the handler chain in the exceptional case
3478 is contained in expand_cleanups.
3480 This routine is only used by expand_eh_region_start, and that is
3481 the only way in which an exception region should be started. This
3482 routine is only used when using the setjmp/longjmp codegen method
3483 for exception handling. */
3485 int
3486 expand_dhc_cleanup (decl)
3487 tree decl;
3489 struct nesting *thisblock = block_stack;
3490 tree cleanup;
3492 /* Error if we are not in any block. */
3493 if (thisblock == 0)
3494 return 0;
3496 /* Record the cleanup for the dynamic handler chain. */
3498 /* All cleanups must be on the function_obstack. */
3499 push_obstacks_nochange ();
3500 resume_temporary_allocation ();
3501 cleanup = make_node (POPDHC_EXPR);
3502 pop_obstacks ();
3504 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3505 thisblock->data.block.cleanups
3506 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3508 /* If this block has a cleanup, it belongs in stack_block_stack. */
3509 stack_block_stack = thisblock;
3510 return 1;
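/* Conceptual model of the setjmp/longjmp scheme, an assumption added
   purely for illustration (the real chain lives in the runtime support,
   not in this file): entering an exception region pushes a handler, the
   POPDHC_EXPR cleanup recorded above guarantees the matching pop on
   normal exit, and __sjthrow pops it on the exceptional path.  */
#if 0
#include <setjmp.h>

struct handler_link { struct handler_link *next; jmp_buf where; };
static struct handler_link *dhc_top;

static void
push_dhc (link)
     struct handler_link *link;
{
  link->next = dhc_top;
  dhc_top = link;
}

static void
pop_dhc ()			/* the effect of a POPDHC_EXPR */
{
  dhc_top = dhc_top->next;
}
#endif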
3513 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3514 DECL_ELTS is the list of elements that belong to DECL's type.
3515 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3517 void
3518 expand_anon_union_decl (decl, cleanup, decl_elts)
3519 tree decl, cleanup, decl_elts;
3521 struct nesting *thisblock = block_stack;
3522 rtx x;
3524 expand_decl (decl);
3525 expand_decl_cleanup (decl, cleanup);
3526 x = DECL_RTL (decl);
3528 while (decl_elts)
3530 tree decl_elt = TREE_VALUE (decl_elts);
3531 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3532 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3534 /* Propagate the union's alignment to the elements. */
3535 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3537 /* If the element has BLKmode and the union doesn't, the union is
3538 aligned such that the element doesn't need to have BLKmode, so
3539 change the element's mode to the appropriate one for its size. */
3540 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3541 DECL_MODE (decl_elt) = mode
3542 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3543 MODE_INT, 1);
3545 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3546 instead create a new MEM rtx with the proper mode. */
3547 if (GET_CODE (x) == MEM)
3549 if (mode == GET_MODE (x))
3550 DECL_RTL (decl_elt) = x;
3551 else
3553 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
3554 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3555 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3558 else if (GET_CODE (x) == REG)
3560 if (mode == GET_MODE (x))
3561 DECL_RTL (decl_elt) = x;
3562 else
3563 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
3565 else
3566 abort ();
3568 /* Record the cleanup if there is one. */
3570 if (cleanup != 0)
3571 thisblock->data.block.cleanups
3572 = temp_tree_cons (decl_elt, cleanup_elt,
3573 thisblock->data.block.cleanups);
3575 decl_elts = TREE_CHAIN (decl_elts);
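/* User-level example of the construct handled above (G++ and GNU C
   anonymous unions): every element shares the union's storage, which is
   why each element's DECL_RTL is carved out of the union's own MEM or
   REG.  */
#if 0
void
f ()
{
  union { int i; float fl; };	/* anonymous union */
  i = 3;			/* i and fl name the same stack slot */
}
#endif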
3579 /* Expand a list of cleanups LIST.
3580 Elements may be expressions or may be nested lists.
3582 If DONT_DO is nonnull, then any list-element
3583 whose TREE_PURPOSE matches DONT_DO is omitted.
3584 This is sometimes used to avoid a cleanup associated with
3585 a value that is being returned out of the scope.
3587 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
3588 goto and handle protection regions specially in that case.
3590 If REACHABLE, we emit code, otherwise just inform the exception handling
3591 code about this finalization. */
3593 static void
3594 expand_cleanups (list, dont_do, in_fixup, reachable)
3595 tree list;
3596 tree dont_do;
3597 int in_fixup;
3598 int reachable;
3600 tree tail;
3601 for (tail = list; tail; tail = TREE_CHAIN (tail))
3602 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3604 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3605 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
3606 else
3608 if (! in_fixup)
3610 tree cleanup = TREE_VALUE (tail);
3612 /* See expand_d{h,c}c_cleanup for why we avoid this. */
3613 if (TREE_CODE (cleanup) != POPDHC_EXPR
3614 && TREE_CODE (cleanup) != POPDCC_EXPR
3615 /* See expand_eh_region_start_tree for this case. */
3616 && ! TREE_ADDRESSABLE (tail))
3618 cleanup = protect_with_terminate (cleanup);
3619 expand_eh_region_end (cleanup);
3623 if (reachable)
3625 /* Cleanups may be run multiple times. For example,
3626 when exiting a binding contour, we expand the
3627 cleanups associated with that contour. When a goto
3628 within that binding contour has a target outside that
3629 contour, it will expand all cleanups from its scope to
3630 the target. Though the cleanups are expanded multiple
3631 times, the control paths are non-overlapping so the
3632 cleanups will not be executed twice. */
3634 /* We may need to protect fixups with rethrow regions. */
3635 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
3636 if (protect)
3637 expand_fixup_region_start ();
3638 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3639 if (protect)
3640 expand_fixup_region_end (TREE_VALUE (tail));
3641 free_temp_slots ();
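/* Sketch, reconstructed from the walk above: LIST is a TREE_LIST in
   which each TREE_VALUE is either a cleanup expression or another
   TREE_LIST, hence the recursion.  Building such a nested list with the
   tree.c primitives might look like this; CLEANUP_A, CLEANUP_B and
   DECL_B are placeholders.  */
#if 0
  tree inner = tree_cons (NULL_TREE, cleanup_a, NULL_TREE);
  tree list = tree_cons (NULL_TREE, inner,	/* nested list */
			 tree_cons (decl_b, cleanup_b, NULL_TREE));
  expand_cleanups (list, NULL_TREE, 0, 1);	/* static in this file */
#endif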
3647 /* Mark the context we are emitting RTL for as a conditional
3648 context, so that any cleanup actions we register with
3649 expand_decl_init will be properly conditionalized when those
3650 cleanup actions are later performed. Must be called before any
3651 expression (tree) is expanded that is within a conditional context. */
3653 void
3654 start_cleanup_deferral ()
3656 /* block_stack can be NULL if we are inside the parameter list. It is
3657 OK to do nothing, because cleanups aren't possible here. */
3658 if (block_stack)
3659 ++block_stack->data.block.conditional_code;
3662 /* Mark the end of a conditional region of code. Because cleanup
3663 deferrals may be nested, we may still be in a conditional region
3664 after we end the currently deferred cleanups; only after we end all
3665 deferred cleanups are we back in unconditional code. */
3667 void
3668 end_cleanup_deferral ()
3670 /* block_stack can be NULL if we are inside the parameter list. It is
3671 OK to do nothing, because cleanups aren't possible here. */
3672 if (block_stack)
3673 --block_stack->data.block.conditional_code;
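/* Usage pattern, reconstructed (the real calls come from the front ends
   and from this file's own conditional constructs): each conditionally
   executed arm is bracketed so that cleanups it registers are deferred.
   THEN_CLAUSE stands for the arm's tree.  */
#if 0
  start_cleanup_deferral ();
  expand_expr (then_clause, const0_rtx, VOIDmode, 0);
  end_cleanup_deferral ();
#endif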
3676 /* Move all cleanups from the current block_stack
3677 to the containing block_stack, where they are assumed to
3678 have been created. If anything can cause a temporary to
3679 be created, but not expanded for more than one level of
3680 block_stacks, then this code will have to change. */
3682 void
3683 move_cleanups_up ()
3685 struct nesting *block = block_stack;
3686 struct nesting *outer = block->next;
3688 outer->data.block.cleanups
3689 = chainon (block->data.block.cleanups,
3690 outer->data.block.cleanups);
3691 block->data.block.cleanups = 0;
3694 tree
3695 last_cleanup_this_contour ()
3697 if (block_stack == 0)
3698 return 0;
3700 return block_stack->data.block.cleanups;
3703 /* Return 1 if there are any pending cleanups at this point.
3704 If THIS_CONTOUR is nonzero, check the current contour as well.
3705 Otherwise, look only at the contours that enclose this one. */
3707 int
3708 any_pending_cleanups (this_contour)
3709 int this_contour;
3711 struct nesting *block;
3713 if (block_stack == 0)
3714 return 0;
3716 if (this_contour && block_stack->data.block.cleanups != NULL)
3717 return 1;
3718 if (block_stack->data.block.cleanups == 0
3719 && block_stack->data.block.outer_cleanups == 0)
3720 return 0;
3722 for (block = block_stack->next; block; block = block->next)
3723 if (block->data.block.cleanups != 0)
3724 return 1;
3726 return 0;
3729 /* Enter a case (Pascal) or switch (C) statement.
3730 Push a block onto case_stack and nesting_stack
3731 to accumulate the case-labels that are seen
3732 and to record the labels generated for the statement.
3734 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3735 Otherwise, this construct is transparent for `exit_something'.
3737 EXPR is the index-expression to be dispatched on.
3738 TYPE is its nominal type. We could simply convert EXPR to this type,
3739 but instead we take short cuts. */
3741 void
3742 expand_start_case (exit_flag, expr, type, printname)
3743 int exit_flag;
3744 tree expr;
3745 tree type;
3746 char *printname;
3748 register struct nesting *thiscase = ALLOC_NESTING ();
3750 /* Make an entry on case_stack for the case we are entering. */
3752 thiscase->next = case_stack;
3753 thiscase->all = nesting_stack;
3754 thiscase->depth = ++nesting_depth;
3755 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3756 thiscase->data.case_stmt.case_list = 0;
3757 thiscase->data.case_stmt.index_expr = expr;
3758 thiscase->data.case_stmt.nominal_type = type;
3759 thiscase->data.case_stmt.default_label = 0;
3760 thiscase->data.case_stmt.num_ranges = 0;
3761 thiscase->data.case_stmt.printname = printname;
3762 thiscase->data.case_stmt.seenlabel = 0;
3763 case_stack = thiscase;
3764 nesting_stack = thiscase;
3766 do_pending_stack_adjust ();
3768 /* Make sure case_stmt.start points to something that won't
3769 need any transformation before expand_end_case. */
3770 if (GET_CODE (get_last_insn ()) != NOTE)
3771 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3773 thiscase->data.case_stmt.start = get_last_insn ();
3775 start_cleanup_deferral ();
3779 /* Start a "dummy case statement" within which case labels are invalid
3780 and are not connected to any larger real case statement.
3781 This can be used if you don't want to let a case statement jump
3782 into the middle of certain kinds of constructs. */
3784 void
3785 expand_start_case_dummy ()
3787 register struct nesting *thiscase = ALLOC_NESTING ();
3789 /* Make an entry on case_stack for the dummy. */
3791 thiscase->next = case_stack;
3792 thiscase->all = nesting_stack;
3793 thiscase->depth = ++nesting_depth;
3794 thiscase->exit_label = 0;
3795 thiscase->data.case_stmt.case_list = 0;
3796 thiscase->data.case_stmt.start = 0;
3797 thiscase->data.case_stmt.nominal_type = 0;
3798 thiscase->data.case_stmt.default_label = 0;
3799 thiscase->data.case_stmt.num_ranges = 0;
3800 case_stack = thiscase;
3801 nesting_stack = thiscase;
3802 start_cleanup_deferral ();
3805 /* End a dummy case statement. */
3807 void
3808 expand_end_case_dummy ()
3810 end_cleanup_deferral ();
3811 POPSTACK (case_stack);
3814 /* Return the data type of the index-expression
3815 of the innermost case statement, or null if none. */
3817 tree
3818 case_index_expr_type ()
3820 if (case_stack)
3821 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3822 return 0;
3825 /* Accumulate one case or default label inside a case or switch statement.
3826 VALUE is the value of the case (a null pointer, for a default label).
3827 The function CONVERTER, when applied to arguments T and V,
3828 converts the value V to the type T.
3830 If not currently inside a case or switch statement, return 1 and do
3831 nothing. The caller will print a language-specific error message.
3832 If VALUE is a duplicate or overlaps, return 2 and do nothing
3833 except store the (first) duplicate node in *DUPLICATE.
3834 If VALUE is out of range, return 3 and do nothing.
3835 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3836 Return 0 on success.
3838 Extended to handle range statements. */
3840 int
3841 pushcase (value, converter, label, duplicate)
3842 register tree value;
3843 tree (*converter) PROTO((tree, tree));
3844 register tree label;
3845 tree *duplicate;
3847 tree index_type;
3848 tree nominal_type;
3850 /* Fail if not inside a real case statement. */
3851 if (! (case_stack && case_stack->data.case_stmt.start))
3852 return 1;
3854 if (stack_block_stack
3855 && stack_block_stack->depth > case_stack->depth)
3856 return 5;
3858 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3859 nominal_type = case_stack->data.case_stmt.nominal_type;
3861 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3862 if (index_type == error_mark_node)
3863 return 0;
3865 /* Convert VALUE to the type in which the comparisons are nominally done. */
3866 if (value != 0)
3867 value = (*converter) (nominal_type, value);
3869 /* If this is the first label, warn if any insns have been emitted. */
3870 if (case_stack->data.case_stmt.seenlabel == 0)
3872 rtx insn;
3873 for (insn = case_stack->data.case_stmt.start;
3874 insn;
3875 insn = NEXT_INSN (insn))
3877 if (GET_CODE (insn) == CODE_LABEL)
3878 break;
3879 if (GET_CODE (insn) != NOTE
3880 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3882 warning ("unreachable code at beginning of %s",
3883 case_stack->data.case_stmt.printname);
3884 break;
3888 case_stack->data.case_stmt.seenlabel = 1;
3890 /* Fail if this value is out of range for the actual type of the index
3891 (which may be narrower than NOMINAL_TYPE). */
3892 if (value != 0 && ! int_fits_type_p (value, index_type))
3893 return 3;
3895 /* Fail if this is a duplicate or overlaps another entry. */
3896 if (value == 0)
3898 if (case_stack->data.case_stmt.default_label != 0)
3900 *duplicate = case_stack->data.case_stmt.default_label;
3901 return 2;
3903 case_stack->data.case_stmt.default_label = label;
3905 else
3906 return add_case_node (value, value, label, duplicate);
3908 expand_label (label);
3909 return 0;
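/* Sketch of a caller, loosely modeled on what a C-family front end does;
   COND, CASE_VALUE and LABEL_DECL are placeholders and the diagnostic
   strings are illustrative, not the real front-end messages.  */
#if 0
  tree duplicate;

  expand_start_case (1, cond, TREE_TYPE (cond), "switch statement");
  /* ... while parsing the body, for each label: ... */
  switch (pushcase (case_value, convert, label_decl, &duplicate))
    {
    case 1: error ("case label not within a switch statement"); break;
    case 2: error ("duplicate case value"); break;
    case 3: error ("case value out of range"); break;
    case 5: error ("case label within scope of cleanup or variable array"); break;
    }
  /* ... after the body: ... */
  expand_end_case (cond);
#endif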
3912 /* Like pushcase but this case applies to all values between VALUE1 and
3913 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
3914 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
3915 starts at VALUE1 and ends at the highest value of the index type.
3916 If both are NULL, this case applies to all values.
3918 The return value is the same as that of pushcase but there is one
3919 additional error code: 4 means the specified range was empty. */
3921 int
3922 pushcase_range (value1, value2, converter, label, duplicate)
3923 register tree value1, value2;
3924 tree (*converter) PROTO((tree, tree));
3925 register tree label;
3926 tree *duplicate;
3928 tree index_type;
3929 tree nominal_type;
3931 /* Fail if not inside a real case statement. */
3932 if (! (case_stack && case_stack->data.case_stmt.start))
3933 return 1;
3935 if (stack_block_stack
3936 && stack_block_stack->depth > case_stack->depth)
3937 return 5;
3939 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3940 nominal_type = case_stack->data.case_stmt.nominal_type;
3942 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3943 if (index_type == error_mark_node)
3944 return 0;
3946 /* If this is the first label, warn if any insns have been emitted. */
3947 if (case_stack->data.case_stmt.seenlabel == 0)
3949 rtx insn;
3950 for (insn = case_stack->data.case_stmt.start;
3951 insn;
3952 insn = NEXT_INSN (insn))
3954 if (GET_CODE (insn) == CODE_LABEL)
3955 break;
3956 if (GET_CODE (insn) != NOTE
3957 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3959 warning ("unreachable code at beginning of %s",
3960 case_stack->data.case_stmt.printname);
3961 break;
3965 case_stack->data.case_stmt.seenlabel = 1;
3967 /* Convert VALUEs to type in which the comparisons are nominally done
3968 and replace any unspecified value with the corresponding bound. */
3969 if (value1 == 0)
3970 value1 = TYPE_MIN_VALUE (index_type);
3971 if (value2 == 0)
3972 value2 = TYPE_MAX_VALUE (index_type);
3974 /* Fail if the range is empty. Do this before any conversion since
3975 we want to allow out-of-range empty ranges. */
3976 if (value2 && tree_int_cst_lt (value2, value1))
3977 return 4;
3979 value1 = (*converter) (nominal_type, value1);
3981 /* If the max was unbounded, use the max of the nominal_type we are
3982 converting to. Do this after the < check above to suppress false
3983 positives. */
3984 if (!value2)
3985 value2 = TYPE_MAX_VALUE (nominal_type);
3986 value2 = (*converter) (nominal_type, value2);
3988 /* Fail if these values are out of range. */
3989 if (TREE_CONSTANT_OVERFLOW (value1)
3990 || ! int_fits_type_p (value1, index_type))
3991 return 3;
3993 if (TREE_CONSTANT_OVERFLOW (value2)
3994 || ! int_fits_type_p (value2, index_type))
3995 return 3;
3997 return add_case_node (value1, value2, label, duplicate);
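/* The user-level construct this supports (GNU C case ranges): */
#if 0
  switch (c)
    {
    case 'a' ... 'z':		/* one pushcase_range call */
      return 1;
    default:
      return 0;
    }
#endif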
4000 /* Do the actual insertion of a case label for pushcase and pushcase_range
4001 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4002 slowdown for large switch statements. */
4004 static int
4005 add_case_node (low, high, label, duplicate)
4006 tree low, high;
4007 tree label;
4008 tree *duplicate;
4010 struct case_node *p, **q, *r;
4012 q = &case_stack->data.case_stmt.case_list;
4013 p = *q;
4015 while ((r = *q))
4017 p = r;
4019 /* Keep going past elements distinctly greater than HIGH. */
4020 if (tree_int_cst_lt (high, p->low))
4021 q = &p->left;
4023 /* or distinctly less than LOW. */
4024 else if (tree_int_cst_lt (p->high, low))
4025 q = &p->right;
4027 else
4029 /* We have an overlap; this is an error. */
4030 *duplicate = p->code_label;
4031 return 2;
4035 /* Add this label to the chain, and succeed.
4036 Copy LOW, HIGH so they are on temporary rather than momentary
4037 obstack and will thus survive till the end of the case statement. */
4039 r = (struct case_node *) oballoc (sizeof (struct case_node));
4040 r->low = copy_node (low);
4042 /* If the bounds are equal, turn this into the one-value case. */
4044 if (tree_int_cst_equal (low, high))
4045 r->high = r->low;
4046 else
4048 r->high = copy_node (high);
4049 case_stack->data.case_stmt.num_ranges++;
4052 r->code_label = label;
4053 expand_label (label);
4055 *q = r;
4056 r->parent = p;
4057 r->left = 0;
4058 r->right = 0;
4059 r->balance = 0;
4061 while (p)
4063 struct case_node *s;
4065 if (r == p->left)
4067 int b;
4069 if (! (b = p->balance))
4070 /* Growth propagation from left side. */
4071 p->balance = -1;
4072 else if (b < 0)
4074 if (r->balance < 0)
4076 /* R-Rotation */
4077 if (p->left = s = r->right)
4078 s->parent = p;
4080 r->right = p;
4081 p->balance = 0;
4082 r->balance = 0;
4083 s = p->parent;
4084 p->parent = r;
4086 if (r->parent = s)
4088 if (s->left == p)
4089 s->left = r;
4090 else
4091 s->right = r;
4093 else
4094 case_stack->data.case_stmt.case_list = r;
4096 else
4097 /* r->balance == +1 */
4099 /* LR-Rotation */
4101 int b2;
4102 struct case_node *t = r->right;
4104 if (p->left = s = t->right)
4105 s->parent = p;
4107 t->right = p;
4108 if (r->right = s = t->left)
4109 s->parent = r;
4111 t->left = r;
4112 b = t->balance;
4113 b2 = b < 0;
4114 p->balance = b2;
4115 b2 = -b2 - b;
4116 r->balance = b2;
4117 t->balance = 0;
4118 s = p->parent;
4119 p->parent = t;
4120 r->parent = t;
4122 if (t->parent = s)
4124 if (s->left == p)
4125 s->left = t;
4126 else
4127 s->right = t;
4129 else
4130 case_stack->data.case_stmt.case_list = t;
4132 break;
4135 else
4137 /* p->balance == +1; growth of left side balances the node. */
4138 p->balance = 0;
4139 break;
4142 else
4143 /* r == p->right */
4145 int b;
4147 if (! (b = p->balance))
4148 /* Growth propagation from right side. */
4149 p->balance++;
4150 else if (b > 0)
4152 if (r->balance > 0)
4154 /* L-Rotation */
4156 if (p->right = s = r->left)
4157 s->parent = p;
4159 r->left = p;
4160 p->balance = 0;
4161 r->balance = 0;
4162 s = p->parent;
4163 p->parent = r;
4164 if (r->parent = s)
4166 if (s->left == p)
4167 s->left = r;
4168 else
4169 s->right = r;
4172 else
4173 case_stack->data.case_stmt.case_list = r;
4176 else
4177 /* r->balance == -1 */
4179 /* RL-Rotation */
4180 int b2;
4181 struct case_node *t = r->left;
4183 if (p->right = s = t->left)
4184 s->parent = p;
4186 t->left = p;
4188 if (r->left = s = t->right)
4189 s->parent = r;
4191 t->right = r;
4192 b = t->balance;
4193 b2 = b < 0;
4194 r->balance = b2;
4195 b2 = -b2 - b;
4196 p->balance = b2;
4197 t->balance = 0;
4198 s = p->parent;
4199 p->parent = t;
4200 r->parent = t;
4202 if (t->parent = s)
4204 if (s->left == p)
4205 s->left = t;
4206 else
4207 s->right = t;
4210 else
4211 case_stack->data.case_stmt.case_list = t;
4213 break;
4215 else
4217 /* p->balance == -1; growth of right side balances the node. */
4218 p->balance = 0;
4219 break;
4223 r = p;
4224 p = p->parent;
4227 return 0;
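/* The ordering invariant used by the AVL insertion above, stated in
   isolation: two case ranges are comparable only when disjoint, and any
   other pair is rejected as a duplicate (return code 2).  A minimal
   standalone predicate, using plain host integers instead of INTEGER_CST
   trees purely for illustration: */
#if 0
/* Returns <0 if [ALOW,AHIGH] lies wholly below [BLOW,BHIGH],
   >0 if wholly above, 0 if the ranges overlap.  */
static int
range_cmp (alow, ahigh, blow, bhigh)
     long alow, ahigh, blow, bhigh;
{
  if (ahigh < blow)
    return -1;
  if (bhigh < alow)
    return 1;
  return 0;
}
#endif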
4231 /* Returns the number of possible values of TYPE.
4232 Returns -1 if the number is unknown or variable.
4233 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4234 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4235 do not increase monotonically (there may be duplicates);
4236 to 1 if the values increase monotonically, but not always by 1;
4237 otherwise sets it to 0. */
4239 HOST_WIDE_INT
4240 all_cases_count (type, spareness)
4241 tree type;
4242 int *spareness;
4244 HOST_WIDE_INT count;
4245 *spareness = 0;
4247 switch (TREE_CODE (type))
4249 tree t;
4250 case BOOLEAN_TYPE:
4251 count = 2;
4252 break;
4253 case CHAR_TYPE:
4254 count = 1 << BITS_PER_UNIT;
4255 break;
4256 default:
4257 case INTEGER_TYPE:
4258 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4259 || TYPE_MAX_VALUE (type) == NULL
4260 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4261 return -1;
4262 else
4264 /* count
4265 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4266 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4267 but with overflow checking. */
4268 tree mint = TYPE_MIN_VALUE (type);
4269 tree maxt = TYPE_MAX_VALUE (type);
4270 HOST_WIDE_INT lo, hi;
4271 neg_double(TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4272 &lo, &hi);
4273 add_double(TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4274 lo, hi, &lo, &hi);
4275 add_double (lo, hi, 1, 0, &lo, &hi);
4276 if (hi != 0 || lo < 0)
4277 return -2;
4278 count = lo;
4280 break;
4281 case ENUMERAL_TYPE:
4282 count = 0;
4283 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4285 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4286 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4287 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4288 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4289 *spareness = 1;
4290 count++;
4292 if (*spareness == 1)
4294 tree prev = TREE_VALUE (TYPE_VALUES (type));
4295 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4297 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4299 *spareness = 2;
4300 break;
4302 prev = TREE_VALUE (t);
4307 return count;
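/* Restated for clarity: neg_double and add_double (from fold-const.c)
   perform signed two-word arithmetic, so the INTEGER_TYPE arm computes

	count = maxval + (-minval) + 1

   in HOST_WIDE_INT pairs; a nonzero high word or a negative low word
   means the count does not fit in one HOST_WIDE_INT, hence the -2.  */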
4311 #define BITARRAY_TEST(ARRAY, INDEX) \
4312 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4313 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4314 #define BITARRAY_SET(ARRAY, INDEX) \
4315 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4316 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
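/* Example use of the bit-array macros above (a sketch; the size and
   index are arbitrary): */
#if 0
  long size = 300;
  long bytes = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
  unsigned char *seen = (unsigned char *) xmalloc (bytes);

  bzero ((char *) seen, bytes);
  BITARRAY_SET (seen, 42);
  /* BITARRAY_TEST (seen, 42) is now nonzero; every other index is 0.  */
  free (seen);
#endif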
4318 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4319 with the case values we have seen, assuming the case expression
4320 has the given TYPE.
4321 SPARSENESS is as determined by all_cases_count.
4323 The time needed is proportional to COUNT, unless
4324 SPARSENESS is 2, in which case quadratic time is needed. */
4326 void
4327 mark_seen_cases (type, cases_seen, count, sparseness)
4328 tree type;
4329 unsigned char *cases_seen;
4330 long count;
4331 int sparseness;
4333 tree next_node_to_try = NULL_TREE;
4334 long next_node_offset = 0;
4336 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4337 tree val = make_node (INTEGER_CST);
4338 TREE_TYPE (val) = type;
4339 if (! root)
4340 ; /* Do nothing */
4341 else if (sparseness == 2)
4343 tree t;
4344 HOST_WIDE_INT xlo;
4346 /* This less efficient loop is only needed to handle
4347 duplicate case values (multiple enum constants
4348 with the same value). */
4349 TREE_TYPE (val) = TREE_TYPE (root->low);
4350 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4351 t = TREE_CHAIN (t), xlo++)
4353 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4354 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4355 n = root;
4358 /* Keep going past elements distinctly greater than VAL. */
4359 if (tree_int_cst_lt (val, n->low))
4360 n = n->left;
4362 /* or distinctly less than VAL. */
4363 else if (tree_int_cst_lt (n->high, val))
4364 n = n->right;
4366 else
4368 /* We have found a matching range. */
4369 BITARRAY_SET (cases_seen, xlo);
4370 break;
4373 while (n);
4376 else
4378 if (root->left)
4379 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4380 for (n = root; n; n = n->right)
4382 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4383 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4384 while ( ! tree_int_cst_lt (n->high, val))
4386 /* Calculate (into xlo) the "offset" of the integer (val).
4387 The element with lowest value has offset 0, the next smallest
4388 element has offset 1, etc. */
4390 HOST_WIDE_INT xlo, xhi;
4391 tree t;
4392 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4394 /* The TYPE_VALUES will be in increasing order, so
4395 start searching where we last ended. */
4396 t = next_node_to_try;
4397 xlo = next_node_offset;
4398 xhi = 0;
4399 for (;;)
4401 if (t == NULL_TREE)
4403 t = TYPE_VALUES (type);
4404 xlo = 0;
4406 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4408 next_node_to_try = TREE_CHAIN (t);
4409 next_node_offset = xlo + 1;
4410 break;
4412 xlo++;
4413 t = TREE_CHAIN (t);
4414 if (t == next_node_to_try)
4416 xlo = -1;
4417 break;
4421 else
4423 t = TYPE_MIN_VALUE (type);
4424 if (t)
4425 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4426 &xlo, &xhi);
4427 else
4428 xlo = xhi = 0;
4429 add_double (xlo, xhi,
4430 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4431 &xlo, &xhi);
4434 if (xhi == 0 && xlo >= 0 && xlo < count)
4435 BITARRAY_SET (cases_seen, xlo);
4436 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4437 1, 0,
4438 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4444 /* Called when the index of a switch statement is an enumerated type
4445 and there is no default label.
4447 Checks that all enumeration literals are covered by the case
4448 expressions of a switch. Also, warn if there are any extra
4449 switch cases that are *not* elements of the enumerated type.
4451 If all enumeration literals were covered by the case expressions,
4452 turn one of the expressions into the default expression since it should
4453 not be possible to fall through such a switch. */
4455 void
4456 check_for_full_enumeration_handling (type)
4457 tree type;
4459 register struct case_node *n;
4460 register tree chain;
4461 #if 0 /* variable used by 'if 0'ed code below. */
4462 register struct case_node **l;
4463 int all_values = 1;
4464 #endif
4466 /* True iff the selector type is a numbered set mode. */
4467 int sparseness = 0;
4469 /* The number of possible selector values. */
4470 HOST_WIDE_INT size;
4472 /* For each possible selector value, a one iff it has been matched
4473 by a case value alternative. */
4474 unsigned char *cases_seen;
4476 /* The allocated size of cases_seen, in chars. */
4477 long bytes_needed;
4479 if (! warn_switch)
4480 return;
4482 size = all_cases_count (type, &sparseness);
4483 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
4485 if (size > 0 && size < 600000
4486 /* We deliberately use malloc here - not xmalloc. */
4487 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
4489 long i;
4490 tree v = TYPE_VALUES (type);
4491 bzero (cases_seen, bytes_needed);
4493 /* The time complexity of this code is normally O(N), where
4494 N is the number of members in the enumerated type.
4495 However, if type is an ENUMERAL_TYPE whose values do not
4496 increase monotonically, O(N*log(N)) time may be needed. */
4498 mark_seen_cases (type, cases_seen, size, sparseness);
4500 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4502 if (BITARRAY_TEST(cases_seen, i) == 0)
4503 warning ("enumeration value `%s' not handled in switch",
4504 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
4507 free (cases_seen);
4510 /* Now we go the other way around; we warn if there are case
4511 expressions that don't correspond to enumerators. This can
4512 occur since C and C++ don't enforce type-checking of
4513 assignments to enumeration variables. */
4515 if (case_stack->data.case_stmt.case_list
4516 && case_stack->data.case_stmt.case_list->left)
4517 case_stack->data.case_stmt.case_list
4518 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
4519 if (warn_switch)
4520 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4522 for (chain = TYPE_VALUES (type);
4523 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4524 chain = TREE_CHAIN (chain))
4527 if (!chain)
4529 if (TYPE_NAME (type) == 0)
4530 warning ("case value `%d' not in enumerated type",
4531 TREE_INT_CST_LOW (n->low));
4532 else
4533 warning ("case value `%d' not in enumerated type `%s'",
4534 TREE_INT_CST_LOW (n->low),
4535 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4536 == IDENTIFIER_NODE)
4537 ? TYPE_NAME (type)
4538 : DECL_NAME (TYPE_NAME (type))));
4540 if (!tree_int_cst_equal (n->low, n->high))
4542 for (chain = TYPE_VALUES (type);
4543 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4544 chain = TREE_CHAIN (chain))
4547 if (!chain)
4549 if (TYPE_NAME (type) == 0)
4550 warning ("case value `%d' not in enumerated type",
4551 TREE_INT_CST_LOW (n->high));
4552 else
4553 warning ("case value `%d' not in enumerated type `%s'",
4554 TREE_INT_CST_LOW (n->high),
4555 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4556 == IDENTIFIER_NODE)
4557 ? TYPE_NAME (type)
4558 : DECL_NAME (TYPE_NAME (type))));
4563 #if 0
4564 /* ??? This optimization is disabled because it causes valid programs to
4565 fail. ANSI C does not guarantee that an expression with enum type
4566 will have a value that is the same as one of the enumeration literals. */
4568 /* If all values were found as case labels, make one of them the default
4569 label. Thus, this switch will never fall through. We arbitrarily pick
4570 the last one to make the default since this is likely the most
4571 efficient choice. */
4573 if (all_values)
4575 for (l = &case_stack->data.case_stmt.case_list;
4576 (*l)->right != 0;
4577 l = &(*l)->right)
4580 case_stack->data.case_stmt.default_label = (*l)->code_label;
4581 *l = 0;
4583 #endif /* 0 */
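/* User-level example of what the check above reports under -Wswitch
   (illustration): */
#if 0
enum color { RED, GREEN, BLUE };

int
f (c)
     enum color c;
{
  switch (c)
    {				/* no default label */
    case RED:
      return 0;
    case GREEN:
      return 1;
    }
  /* warning: enumeration value `BLUE' not handled in switch */
  return -1;
}
#endif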
4587 /* Terminate a case (Pascal) or switch (C) statement
4588 in which ORIG_INDEX is the expression to be tested.
4589 Generate the code to test it and jump to the right place. */
4591 void
4592 expand_end_case (orig_index)
4593 tree orig_index;
4595 tree minval, maxval, range, orig_minval;
4596 rtx default_label = 0;
4597 register struct case_node *n;
4598 int count;
4599 rtx index;
4600 rtx table_label;
4601 int ncases;
4602 rtx *labelvec;
4603 register int i;
4604 rtx before_case;
4605 register struct nesting *thiscase = case_stack;
4606 tree index_expr, index_type;
4607 int unsignedp;
4609 table_label = gen_label_rtx ();
4610 index_expr = thiscase->data.case_stmt.index_expr;
4611 index_type = TREE_TYPE (index_expr);
4612 unsignedp = TREE_UNSIGNED (index_type);
4614 do_pending_stack_adjust ();
4616 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4617 if (index_type != error_mark_node)
4619 /* If the switch expression was an enumerated type, check that all
4620 enumeration literals are covered by the cases.
4621 No sense trying this if there's a default case, however. */
4623 if (!thiscase->data.case_stmt.default_label
4624 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4625 && TREE_CODE (index_expr) != INTEGER_CST)
4626 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4628 /* If this is the first label, warn if any insns have been emitted. */
4629 if (thiscase->data.case_stmt.seenlabel == 0)
4631 rtx insn;
4632 for (insn = get_last_insn ();
4633 insn != case_stack->data.case_stmt.start;
4634 insn = PREV_INSN (insn))
4635 if (GET_CODE (insn) != NOTE
4636 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
4638 warning ("unreachable code at beginning of %s",
4639 case_stack->data.case_stmt.printname);
4640 break;
4644 /* If we don't have a default-label, create one here,
4645 after the body of the switch. */
4646 if (thiscase->data.case_stmt.default_label == 0)
4648 thiscase->data.case_stmt.default_label
4649 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4650 expand_label (thiscase->data.case_stmt.default_label);
4652 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4654 before_case = get_last_insn ();
4656 if (thiscase->data.case_stmt.case_list
4657 && thiscase->data.case_stmt.case_list->left)
4658 thiscase->data.case_stmt.case_list
4659 = case_tree2list(thiscase->data.case_stmt.case_list, 0);
4661 /* Simplify the case-list before we count it. */
4662 group_case_nodes (thiscase->data.case_stmt.case_list);
4664 /* Get upper and lower bounds of case values.
4665 Also convert all the case values to the index expr's data type. */
4667 count = 0;
4668 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4670 /* Check low and high label values are integers. */
4671 if (TREE_CODE (n->low) != INTEGER_CST)
4672 abort ();
4673 if (TREE_CODE (n->high) != INTEGER_CST)
4674 abort ();
4676 n->low = convert (index_type, n->low);
4677 n->high = convert (index_type, n->high);
4679 /* Count the elements and track the largest and smallest
4680 of them (treating them as signed even if they are not). */
4681 if (count++ == 0)
4683 minval = n->low;
4684 maxval = n->high;
4686 else
4688 if (INT_CST_LT (n->low, minval))
4689 minval = n->low;
4690 if (INT_CST_LT (maxval, n->high))
4691 maxval = n->high;
4693 /* A range counts double, since it requires two compares. */
4694 if (! tree_int_cst_equal (n->low, n->high))
4695 count++;
4698 orig_minval = minval;
4700 /* Compute span of values. */
4701 if (count != 0)
4702 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
4704 end_cleanup_deferral ();
4706 if (count == 0)
4708 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
4709 emit_queue ();
4710 emit_jump (default_label);
4713 /* If range of values is much bigger than number of values,
4714 make a sequence of conditional branches instead of a dispatch.
4715 If the switch-index is a constant, do it this way
4716 because we can optimize it. */
4718 #ifndef CASE_VALUES_THRESHOLD
4719 #ifdef HAVE_casesi
4720 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
4721 #else
4722 /* If the machine does not have a case insn that compares the
4723 bounds, this means extra overhead for dispatch tables,
4724 which raises the threshold for using them. */
4725 #define CASE_VALUES_THRESHOLD 5
4726 #endif /* HAVE_casesi */
4727 #endif /* CASE_VALUES_THRESHOLD */
4729 else if (TREE_INT_CST_HIGH (range) != 0
4730 || count < CASE_VALUES_THRESHOLD
4731 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
4732 > 10 * count)
4733 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
4734 || flag_pic
4735 #endif
4736 || TREE_CODE (index_expr) == INTEGER_CST
4737 /* These will reduce to a constant. */
4738 || (TREE_CODE (index_expr) == CALL_EXPR
4739 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
4740 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
4741 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
4742 || (TREE_CODE (index_expr) == COMPOUND_EXPR
4743 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
4745 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4747 /* If the index is a short or char for which we do not have
4748 an insn to handle comparisons directly, convert it to
4749 a full integer now, rather than letting each comparison
4750 generate the conversion. */
4752 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
4753 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
4754 == CODE_FOR_nothing))
4756 enum machine_mode wider_mode;
4757 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
4758 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4759 if (cmp_optab->handlers[(int) wider_mode].insn_code
4760 != CODE_FOR_nothing)
4762 index = convert_to_mode (wider_mode, index, unsignedp);
4763 break;
4767 emit_queue ();
4768 do_pending_stack_adjust ();
4770 index = protect_from_queue (index, 0);
4771 if (GET_CODE (index) == MEM)
4772 index = copy_to_reg (index);
4773 if (GET_CODE (index) == CONST_INT
4774 || TREE_CODE (index_expr) == INTEGER_CST)
4776 /* Make a tree node with the proper constant value
4777 if we don't already have one. */
4778 if (TREE_CODE (index_expr) != INTEGER_CST)
4780 index_expr
4781 = build_int_2 (INTVAL (index),
4782 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
4783 index_expr = convert (index_type, index_expr);
4786 /* For constant index expressions we need only
4787 issue an unconditional branch to the appropriate
4788 target code. The job of removing any unreachable
4789 code is left to the optimisation phase if the
4790 "-O" option is specified. */
4791 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4792 if (! tree_int_cst_lt (index_expr, n->low)
4793 && ! tree_int_cst_lt (n->high, index_expr))
4794 break;
4796 if (n)
4797 emit_jump (label_rtx (n->code_label));
4798 else
4799 emit_jump (default_label);
4801 else
4803 /* If the index expression is not constant, we generate
4804 a binary decision tree to select the appropriate
4805 target code. This is done as follows:
4807 The list of cases is rearranged into a binary tree,
4808 nearly optimal assuming equal probability for each case.
4810 The tree is transformed into RTL, eliminating
4811 redundant test conditions at the same time.
4813 If program flow could reach the end of the
4814 decision tree, an unconditional jump to the
4815 default code is emitted. */
4817 use_cost_table
4818 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
4819 && estimate_case_costs (thiscase->data.case_stmt.case_list));
4820 balance_case_nodes (&thiscase->data.case_stmt.case_list,
4821 NULL_PTR);
4822 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
4823 default_label, index_type);
4824 emit_jump_if_reachable (default_label);
4827 else
4829 int win = 0;
4830 #ifdef HAVE_casesi
4831 if (HAVE_casesi)
4833 enum machine_mode index_mode = SImode;
4834 int index_bits = GET_MODE_BITSIZE (index_mode);
4835 rtx op1, op2;
4836 enum machine_mode op_mode;
4838 /* Convert the index to SImode. */
4839 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
4840 > GET_MODE_BITSIZE (index_mode))
4842 enum machine_mode omode = TYPE_MODE (index_type);
4843 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
4845 /* We must handle the endpoints in the original mode. */
4846 index_expr = build (MINUS_EXPR, index_type,
4847 index_expr, minval);
4848 minval = integer_zero_node;
4849 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4850 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
4851 emit_jump_insn (gen_bltu (default_label));
4852 /* Now we can safely truncate. */
4853 index = convert_to_mode (index_mode, index, 0);
4855 else
4857 if (TYPE_MODE (index_type) != index_mode)
4859 index_expr = convert (type_for_size (index_bits, 0),
4860 index_expr);
4861 index_type = TREE_TYPE (index_expr);
4864 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4866 emit_queue ();
4867 index = protect_from_queue (index, 0);
4868 do_pending_stack_adjust ();
4870 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
4871 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
4872 (index, op_mode))
4873 index = copy_to_mode_reg (op_mode, index);
4875 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
4877 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
4878 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
4879 (op1, op_mode))
4880 op1 = copy_to_mode_reg (op_mode, op1);
4882 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
4884 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
4885 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
4886 (op2, op_mode))
4887 op2 = copy_to_mode_reg (op_mode, op2);
4889 emit_jump_insn (gen_casesi (index, op1, op2,
4890 table_label, default_label));
4891 win = 1;
4893 #endif
4894 #ifdef HAVE_tablejump
4895 if (! win && HAVE_tablejump)
4897 index_expr = convert (thiscase->data.case_stmt.nominal_type,
4898 fold (build (MINUS_EXPR, index_type,
4899 index_expr, minval)));
4900 index_type = TREE_TYPE (index_expr);
4901 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4902 emit_queue ();
4903 index = protect_from_queue (index, 0);
4904 do_pending_stack_adjust ();
4906 do_tablejump (index, TYPE_MODE (index_type),
4907 expand_expr (range, NULL_RTX, VOIDmode, 0),
4908 table_label, default_label);
4909 win = 1;
4911 #endif
4912 if (! win)
4913 abort ();
4915 /* Get table of labels to jump to, in order of case index. */
4917 ncases = TREE_INT_CST_LOW (range) + 1;
4918 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
4919 bzero ((char *) labelvec, ncases * sizeof (rtx));
4921 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4923 register HOST_WIDE_INT i
4924 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
4926 while (1)
4928 labelvec[i]
4929 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
4930 if (i + TREE_INT_CST_LOW (orig_minval)
4931 == TREE_INT_CST_LOW (n->high))
4932 break;
4933 i++;
4937 /* Fill in the gaps with the default. */
4938 for (i = 0; i < ncases; i++)
4939 if (labelvec[i] == 0)
4940 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
4942 /* Output the table */
4943 emit_label (table_label);
4945 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
4946 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
4947 gen_rtx_LABEL_REF (Pmode, table_label),
4948 gen_rtvec_v (ncases, labelvec)));
4949 else
4950 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
4951 gen_rtvec_v (ncases, labelvec)));
4953 /* If the case insn drops through the table,
4954 after the table we must jump to the default-label.
4955 Otherwise record no drop-through after the table. */
4956 #ifdef CASE_DROPS_THROUGH
4957 emit_jump (default_label);
4958 #else
4959 emit_barrier ();
4960 #endif
4963 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
4964 reorder_insns (before_case, get_last_insn (),
4965 thiscase->data.case_stmt.start);
4967 else
4968 end_cleanup_deferral ();
4970 if (thiscase->exit_label)
4971 emit_label (thiscase->exit_label);
4973 POPSTACK (case_stack);
4975 free_temp_slots ();
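/* The dispatch heuristic used above, restated as a standalone predicate
   (a sketch: it ignores the PIC and constant-index special cases, and
   narrows HOST_WIDE_INT to long for brevity).  A jump table is worth
   emitting only when there are at least CASE_VALUES_THRESHOLD case
   values and the span of values is at most ten times their number.  */
#if 0
static int
worth_a_jump_table (count, range)
     long count, range;
{
  return count >= CASE_VALUES_THRESHOLD && range <= 10 * count;
}
#endif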
4978 /* Convert the tree NODE into a list linked by the right field, with the left
4979 field zeroed. RIGHT is used for recursion; it is a list to be placed
4980 rightmost in the resulting list. */
4982 static struct case_node *
4983 case_tree2list (node, right)
4984 struct case_node *node, *right;
4986 struct case_node *left;
4988 if (node->right)
4989 right = case_tree2list (node->right, right);
4991 node->right = right;
4992 if (left = node->left)
4994 node->left = 0;
4995 return case_tree2list (left, node);
4998 return node;
5001 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5003 static void
5004 do_jump_if_equal (op1, op2, label, unsignedp)
5005 rtx op1, op2, label;
5006 int unsignedp;
5008 if (GET_CODE (op1) == CONST_INT
5009 && GET_CODE (op2) == CONST_INT)
5011 if (INTVAL (op1) == INTVAL (op2))
5012 emit_jump (label);
5014 else
5016 enum machine_mode mode = GET_MODE (op1);
5017 if (mode == VOIDmode)
5018 mode = GET_MODE (op2);
5019 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
5020 emit_jump_insn (gen_beq (label));
5024 /* Not all case values are encountered equally. This function
5025 uses a heuristic to weight case labels, in cases where that
5026 looks like a reasonable thing to do.
5028 Right now, all we try to guess is text, and we establish the
5029 following weights:
5031 chars above space: 16
5032 digits: 16
5033 default: 12
5034 space, punct: 8
5035 tab: 4
5036 newline: 2
5037 other "\" chars: 1
5038 remaining chars: 0
5040 If we find any cases in the switch that are not either -1 or in the range
5041 of valid ASCII characters, or are control characters other than those
5042 commonly used with "\", don't treat this switch as scanning text.
5044 Return 1 if these nodes are suitable for cost estimation, otherwise
5045 return 0. */
5047 static int
5048 estimate_case_costs (node)
5049 case_node_ptr node;
5051 tree min_ascii = build_int_2 (-1, -1);
5052 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5053 case_node_ptr n;
5054 int i;
5056 /* If we haven't already made the cost table, make it now. Note that the
5057 lower bound of the table is -1, not zero. */
5059 if (cost_table == NULL)
5061 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5062 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5064 for (i = 0; i < 128; i++)
5066 if (isalnum (i))
5067 cost_table[i] = 16;
5068 else if (ispunct (i))
5069 cost_table[i] = 8;
5070 else if (iscntrl (i))
5071 cost_table[i] = -1;
5074 cost_table[' '] = 8;
5075 cost_table['\t'] = 4;
5076 cost_table['\0'] = 4;
5077 cost_table['\n'] = 2;
5078 cost_table['\f'] = 1;
5079 cost_table['\v'] = 1;
5080 cost_table['\b'] = 1;
5083 /* See if all the case expressions look like text. It is text if the
5084 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5085 as signed arithmetic since we don't want to ever access cost_table with a
5086 value less than -1. Also check that none of the constants in a range
5087 are strange control characters. */
5089 for (n = node; n; n = n->right)
5091 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5092 return 0;
5094 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5095 if (cost_table[i] < 0)
5096 return 0;
5099 /* All interesting values are within the range of interesting
5100 ASCII characters. */
5101 return 1;
5104 /* Scan an ordered list of case nodes
5105 combining those with consecutive values or ranges.
5107 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
5109 static void
5110 group_case_nodes (head)
5111 case_node_ptr head;
5113 case_node_ptr node = head;
5115 while (node)
5117 rtx lb = next_real_insn (label_rtx (node->code_label));
5118 rtx lb2;
5119 case_node_ptr np = node;
5121 /* Try to group the successors of NODE with NODE. */
5122 while (((np = np->right) != 0)
5123 /* Do they jump to the same place? */
5124 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5125 || (lb != 0 && lb2 != 0
5126 && simplejump_p (lb)
5127 && simplejump_p (lb2)
5128 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5129 SET_SRC (PATTERN (lb2)))))
5130 /* Are their ranges consecutive? */
5131 && tree_int_cst_equal (np->low,
5132 fold (build (PLUS_EXPR,
5133 TREE_TYPE (node->high),
5134 node->high,
5135 integer_one_node)))
5136 /* An overflow is not consecutive. */
5137 && tree_int_cst_lt (node->high,
5138 fold (build (PLUS_EXPR,
5139 TREE_TYPE (node->high),
5140 node->high,
5141 integer_one_node))))
5143 node->high = np->high;
5145 /* NP is the first node after NODE which can't be grouped with it.
5146 Delete the nodes in between, and move on to that node. */
5147 node->right = np;
5148 node = np;
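/* Illustration: consecutive labels that fall into the same code share a
   target, so their case_nodes are merged into one range node, just as
   if the GNU C range syntax had been written.  G is a placeholder.  */
#if 0
  switch (x)
    {
    case 1:			/* three nodes before grouping */
    case 2:
    case 3:
      return g (x);
    }
  /* After group_case_nodes: one node with low == 1, high == 3,
     equivalent to `case 1 ... 3:'.  */
#endif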
5152 /* Take an ordered list of case nodes
5153 and transform them into a near optimal binary tree,
5154 on the assumption that any target code selection value is as
5155 likely as any other.
5157 The transformation is performed by splitting the ordered
5158 list into two equal sections plus a pivot. The parts are
5159 then attached to the pivot as left and right branches. Each
5160 branch is then transformed recursively. */
5162 static void
5163 balance_case_nodes (head, parent)
5164 case_node_ptr *head;
5165 case_node_ptr parent;
5167 register case_node_ptr np;
5169 np = *head;
5170 if (np)
5172 int cost = 0;
5173 int i = 0;
5174 int ranges = 0;
5175 register case_node_ptr *npp;
5176 case_node_ptr left;
5178 /* Count the number of entries on branch. Also count the ranges. */
5180 while (np)
5182 if (!tree_int_cst_equal (np->low, np->high))
5184 ranges++;
5185 if (use_cost_table)
5186 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5189 if (use_cost_table)
5190 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5192 i++;
5193 np = np->right;
5196 if (i > 2)
5198 /* Split this list if it is long enough for that to help. */
5199 npp = head;
5200 left = *npp;
5201 if (use_cost_table)
5203 /* Find the place in the list that bisects the list's total cost;
5204 here I gets half the total cost. */
5205 int n_moved = 0;
5206 i = (cost + 1) / 2;
5207 while (1)
5209 /* Skip nodes while their cost does not reach that amount. */
5210 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5211 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5212 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5213 if (i <= 0)
5214 break;
5215 npp = &(*npp)->right;
5216 n_moved += 1;
5218 if (n_moved == 0)
5220 /* Leave this branch lopsided, but optimize left-hand
5221 side and fill in `parent' fields for right-hand side. */
5222 np = *head;
5223 np->parent = parent;
5224 balance_case_nodes (&np->left, np);
5225 for (; np->right; np = np->right)
5226 np->right->parent = np;
5227 return;
5230 /* If there are just three nodes, split at the middle one. */
5231 else if (i == 3)
5232 npp = &(*npp)->right;
5233 else
5235 /* Find the place in the list that bisects the list's total cost,
5236 where ranges count as 2.
5237 Here I gets half the total cost. */
5238 i = (i + ranges + 1) / 2;
5239 while (1)
5241 /* Skip nodes while their cost does not reach that amount. */
5242 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5243 i--;
5244 i--;
5245 if (i <= 0)
5246 break;
5247 npp = &(*npp)->right;
5250 *head = np = *npp;
5251 *npp = 0;
5252 np->parent = parent;
5253 np->left = left;
5255 /* Optimize each of the two split parts. */
5256 balance_case_nodes (&np->left, np);
5257 balance_case_nodes (&np->right, np);
5259 else
5261 /* Else leave this branch as one level,
5262 but fill in `parent' fields. */
5263 np = *head;
5264 np->parent = parent;
5265 for (; np->right; np = np->right)
5266 np->right->parent = np;
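/* Illustration (worked by hand from the code above): balancing the
   ordered chain 1,3,5,7,9 with no cost table splits at 5; each half is
   then short enough to be left as a one-level list, giving

	      5
	     / \
	    1   7
	     \   \
	      3   9

   so roughly log N comparisons are emitted along any path.  */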
5271 /* Search the parent sections of the case node tree
5272 to see if a test for the lower bound of NODE would be redundant.
5273 INDEX_TYPE is the type of the index expression.
5275 The instructions to generate the case decision tree are
5276 output in the same order as nodes are processed so it is
5277 known that if a parent node checks the range of the current
5278 node minus one that the current node is bounded at its lower
5279 span. Thus the test would be redundant. */
5281 static int
5282 node_has_low_bound (node, index_type)
5283 case_node_ptr node;
5284 tree index_type;
5286 tree low_minus_one;
5287 case_node_ptr pnode;
5289 /* If the lower bound of this node is the lowest value in the index type,
5290 we need not test it. */
5292 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5293 return 1;
5295 /* If this node has a left branch, the value at the left must be less
5296 than that at this node, so it cannot be bounded at the bottom and
5297 we need not bother testing any further. */
5299 if (node->left)
5300 return 0;
5302 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5303 node->low, integer_one_node));
5305 /* If the subtraction above overflowed, we can't verify anything.
5306 Otherwise, look for a parent that tests our value - 1. */
5308 if (! tree_int_cst_lt (low_minus_one, node->low))
5309 return 0;
5311 for (pnode = node->parent; pnode; pnode = pnode->parent)
5312 if (tree_int_cst_equal (low_minus_one, pnode->high))
5313 return 1;
5315 return 0;
5318 /* Search the parent sections of the case node tree
5319 to see if a test for the upper bound of NODE would be redundant.
5320 INDEX_TYPE is the type of the index expression.
5322 The instructions to generate the case decision tree are
5323 output in the same order as nodes are processed so it is
5324 known that if a parent node checks the range of the current
5325 node plus one that the current node is bounded at its upper
5326 span. Thus the test would be redundant. */
5328 static int
5329 node_has_high_bound (node, index_type)
5330 case_node_ptr node;
5331 tree index_type;
5333 tree high_plus_one;
5334 case_node_ptr pnode;
5336 /* If there is no upper bound, obviously no test is needed. */
5338 if (TYPE_MAX_VALUE (index_type) == NULL)
5339 return 1;
5341 /* If the upper bound of this node is the highest value in the type
5342 of the index expression, we need not test against it. */
5344 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5345 return 1;
5347 /* If this node has a right branch, the value at the right must be greater
5348 than that at this node, so it cannot be bounded at the top and
5349 we need not bother testing any further. */
5351 if (node->right)
5352 return 0;
5354 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5355 node->high, integer_one_node));
5357 /* If the addition above overflowed, we can't verify anything.
5358 Otherwise, look for a parent that tests our value + 1. */
5360 if (! tree_int_cst_lt (node->high, high_plus_one))
5361 return 0;
5363 for (pnode = node->parent; pnode; pnode = pnode->parent)
5364 if (tree_int_cst_equal (high_plus_one, pnode->low))
5365 return 1;
5367 return 0;
5370 /* Search the parent sections of the
5371 case node tree to see if both tests for the upper and lower
5372 bounds of NODE would be redundant. */
5374 static int
5375 node_is_bounded (node, index_type)
5376 case_node_ptr node;
5377 tree index_type;
5379 return (node_has_low_bound (node, index_type)
5380 && node_has_high_bound (node, index_type));
5383 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5385 static void
5386 emit_jump_if_reachable (label)
5387 rtx label;
5389 if (GET_CODE (get_last_insn ()) != BARRIER)
5390 emit_jump (label);
5393 /* Emit step-by-step code to select a case for the value of INDEX.
5394 The thus generated decision tree follows the form of the
5395 case-node binary tree NODE, whose nodes represent test conditions.
5396 INDEX_TYPE is the type of the index of the switch.
5398 Care is taken to prune redundant tests from the decision tree
5399 by detecting any boundary conditions already checked by
5400 emitted rtx. (See node_has_high_bound, node_has_low_bound
5401 and node_is_bounded, above.)
5403 Where the test conditions can be shown to be redundant we emit
5404 an unconditional jump to the target code. As a further
5405 optimization, the subordinates of a tree node are examined to
5406 check for bounded nodes. In this case conditional and/or
5407 unconditional jumps as a result of the boundary check for the
5408 current node are arranged to target the subordinates' associated
5409 code for out-of-bound conditions on the current node.
5411 We can assume that when control reaches the code generated here,
5412 the index value has already been compared with the parents
5413 of this node, and determined to be on the same side of each parent
5414 as this node is. Thus, if this node tests for the value 51,
5415 and a parent tested for 52, we don't need to consider
5416 the possibility of a value greater than 51. If another parent
5417 tests for the value 50, then this node need not test anything. */
5419 static void
5420 emit_case_nodes (index, node, default_label, index_type)
5421 rtx index;
5422 case_node_ptr node;
5423 rtx default_label;
5424 tree index_type;
5426 /* If INDEX has an unsigned type, we must make unsigned branches. */
5427 int unsignedp = TREE_UNSIGNED (index_type);
5428 typedef rtx rtx_function ();
5429 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
5430 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
5431 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
5432 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
5433 enum machine_mode mode = GET_MODE (index);
5435 /* See if our parents have already tested everything for us.
5436 If they have, emit an unconditional jump for this node. */
5437 if (node_is_bounded (node, index_type))
5438 emit_jump (label_rtx (node->code_label));
  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);

	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label
		= build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      emit_jump_if_reachable (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}
      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue a conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if it would avoid only one
	     right child; it costs too much space to save so little time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 LT, NULL_RTX, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_blt_pat) (default_label));
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->right->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->right->code_label), unsignedp);
	}
      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
	 of the condition here, but they didn't work
	 and I don't understand what the idea was.  -- rms.  */
	  /* If our "most probable entry" is less probable
	     than the default label, emit a jump to
	     the default label using condition codes
	     already lying around.  With no right branch,
	     a branch-greater-than will get us to the default
	     label correctly.  */
	  if (use_cost_table
	      && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
	    ;
#endif /* 0 */

	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 GT, NULL_RTX, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_bgt_pat) (default_label));
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers greater than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->left->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 GT, NULL_RTX, mode, unsignedp, 0);

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			 GE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump_if_reachable (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}
      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 LE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}
      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			 GE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}
      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */

	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}
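
/* An illustrative trace (added commentary, not part of the original
   sources) of the range case with subtrees on both sides, handled
   above: for a range node covering LOW..HIGH the emitted code acts as

	if (i > HIGH) goto T;		(or straight to the right
					subtree's code when that
					subtree is fully bounded)
	if (i >= LOW) goto Lnode;
	...decision code for the left subtree...
	goto default;			(only if still reachable)
     T:	...decision code for the right subtree...
*/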

/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.  */

static tree *block_vector;

void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());
}
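
/* A sketch of the intended calling sequence (added commentary; the
   actual call site is elsewhere in the compiler, not in this file):

	find_loop_tree_blocks ();	record the current BLOCK tree
	...loop unrolling may copy insns...
	unroll_block_trees ();		reassociate BLOCKs with the copies

   find_loop_tree_blocks must run before any insns are duplicated, so
   that block_vector still describes the original instruction stream.  */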