/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */

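/* As an illustrative sketch (not part of the original source), a front
   end expanding

       if (cond)
         then_body;

   would make the calls

       expand_start_cond (cond, 0);
       ... expand THEN_BODY ...
       expand_end_cond ();

   where the 0 means this conditional is not visible to `exit_something'.  */
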
#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars whose address is an offset from the argument pointer
   will be needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

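/* To illustrate (a sketch, not from the original source): the C statement

       switch (x) { case 1: ...  case 3: ...  case 5 ... 9: ...  }

   gives rise to three case_node entries: two single-value nodes with
   LOW == HIGH (1 and 3) and one range node with LOW == 5, HIGH == 9
   (`case 5 ... 9' being the GNU C range-label extension).  Once the AVL
   tree is flattened, the nodes are chained through RIGHT in that order.  */
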
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, it may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block; if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	  /* When in a conditional context, this is the specific
	     cleanup list associated with last_unconditional_cleanup,
	     where we place the conditionalized cleanups.  */
	  tree *cleanup_ptr;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} case_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
   while (this != target); } while (0)

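/* Usage sketch: each expand_end_WHATEVER function pops its own stack with
   this macro; for instance expand_end_cond below finishes with

       POPSTACK (cond_stack);

   which frees every nesting object up to and including the innermost
   conditional, unlinking each one from whatever typed stacks it is on.  */
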
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

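/* An illustrative sketch of when a fixup arises (not from the original
   source): in GNU C,

       {
	 int a[n];          <- contour that resets the stack pointer
	 ...
	 goto l;            <- L not yet defined: expand_fixup records it
       }
     l: ;

   expand_goto cannot yet know what stack level L requires, so it records
   a fixup; when the contour is exited, fixup_gotos inserts the needed
   stack-restoring insns just before the recorded jump.  */
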
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following label_chain.  */
  struct label_chain *next;
  tree label;
};

/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

static void expand_goto_internal	PROTO((tree, rtx, rtx));
static int expand_fixup			PROTO((tree, rtx, rtx));
static void fixup_gotos			PROTO((struct nesting *, rtx, tree,
					       rtx, int));
static void expand_null_return_1	PROTO((rtx, int));
static void expand_value_return		PROTO((rtx));
static int tail_recursion_args		PROTO((tree, tree));
static void expand_cleanups		PROTO((tree, tree, int, int));
static void check_seenlabel		PROTO((void));
static void do_jump_if_equal		PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs		PROTO((case_node_ptr));
static void group_case_nodes		PROTO((case_node_ptr));
static void balance_case_nodes		PROTO((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PROTO((case_node_ptr, tree));
static int node_has_high_bound		PROTO((case_node_ptr, tree));
static int node_is_bounded		PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable	PROTO((rtx));
static void emit_case_nodes		PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node		PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list PROTO((case_node *, case_node *));

void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
  init_eh ();
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;

  init_eh_for_function ();
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
  save_eh_status (p);
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
  restore_eh_status (p);
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}

/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (flag_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, 1,
		       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}

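/* For example (an illustrative sketch), the GNU C computed goto

       void *p = &&lab;
       ...
       goto *p;

   reaches this function with EXP being the pointer expression `p';
   the address is evaluated and an indirect jump to it is emitted.  */
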
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}

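/* Illustrative sketch (not from the original source): a goto from a
   nested function to a label declared in its containing function,

       void f ()
       {
	 __label__ l;
	 void g () { goto l; }
	 g ();
	 l: ;
       }

   causes the front end to call declare_nonlocal_label for L while
   expanding F; expand_goto (below) then takes the nonlocal branch when
   it expands the goto inside G.  */
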
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with hard_frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx, label_ref);
	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}

/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	start_sequence ();
	pushlevel (0);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
	end_sequence ();
	emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point ensures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}

/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (flag_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
				TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}

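/* For example (a sketch): a no-operand asm such as

       asm ("nop");

   arrives here with BODY the STRING_CST "nop", and is emitted as a bare
   ASM_INPUT insn carrying that text.  */
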
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

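/* To make the parameters concrete (an illustrative sketch, not from the
   original source): the GNU C extended asm

       asm ("addl %1,%0" : "+r" (x) : "g" (y) : "cc");

   arrives with STRING the STRING_CST "addl %1,%0"; OUTPUTS a one-element
   list pairing constraint "+r" (TREE_PURPOSE) with the lvalue X
   (TREE_VALUE), the `+' making it an in-out operand counted in NINOUT;
   INPUTS likewise pairing "g" with Y; and CLOBBERS listing "cc".  */
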
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (flag_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      int j;
      int found_equal = 0;
      int found_plus = 0;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
	 if it allows any register.  Be liberal on the latter test, since
	 the worst that happens if we get it wrong is we issue an error
	 message.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
	switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
	  {
	  case '+':
	    /* Make sure we can specify the matching operand.  */
	    if (i > 9)
	      {
		error ("output operand constraint %d contains `+'", i);
		return;
	      }

	    /* Replace '+' with '='.  */
	    TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] = '=';
	    found_plus = 1;
	    break;

	  case '=':
	    found_equal = 1;
	    break;

	  case '?':  case '!':  case '*':  case '%':  case '&':
	  case 'V':  case 'm':  case 'o':  case '<':  case '>':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    error ("matching constraint not valid in output operand");
	    break;

	  case 'p':  case 'g':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      if (! found_equal && ! found_plus)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      if (TREE_CODE (val) == INDIRECT_REF
	  || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || found_plus)
	{
	  if (! allows_reg)
	    mark_addressable (TREE_VALUE (tail));

	  output_rtx[i]
	    = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
			   EXPAND_MEMORY_USE_WO);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);
	}
      else
	{
	  output_rtx[i] = assign_temp (type, 0, 0, 0);
	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
	}

      if (found_plus)
	{
	  inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
	  inout_opnum[ninout++] = i;
	}
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS (VOIDmode,
			       TREE_STRING_POINTER (string), "", 0, argvec,
			       constraints, filename, line);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)));
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
	switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
	  {
	  case '+':  case '=':
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;

	  case '?':  case '!':  case '*':  case '%':  case '&':
	  case 'V':  case 'm':  case 'o':  case '<':  case '>':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	    /* Whether or not a numeric constraint allows a register is
	       decided by the matching constraint, and so there is no need
	       to do anything special with them.  We must handle them in
	       the default case, so that we don't unnecessarily force
	       operands to memory.  */
	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]
		>= '0' + noutputs)
	      {
		error
		  ("matching constraint references invalid operand number");
		return;
	      }

	    /* ... fall through ...  */

	  case 'p':  case 'g':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      if (! allows_reg)
	mark_addressable (TREE_VALUE (tail));

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      if (CONSTANT_P (XVECEXP (body, 3, i))
	  && ! general_operand (XVECEXP (body, 3, i),
				TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)))))
	{
	  if (allows_reg)
	    XVECEXP (body, 3, i)
	      = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			   XVECEXP (body, 3, i));
	  else
	    XVECEXP (body, 3, i)
	      = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
				 XVECEXP (body, 3, i));
	}

      if (! allows_reg
	  && (GET_CODE (XVECEXP (body, 3, i)) == REG
	      || GET_CODE (XVECEXP (body, 3, i)) == SUBREG
	      || GET_CODE (XVECEXP (body, 3, i)) == CONCAT))
	{
	  tree type = TREE_TYPE (TREE_VALUE (tail));
	  rtx memloc = assign_temp (type, 1, 1, 1);

	  emit_move_insn (memloc, XVECEXP (body, 3, i));
	  XVECEXP (body, 3, i) = memloc;
	}

      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			     TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs - ninout; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      static char match[9+1][2]
	= {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
      int j = inout_opnum[i];

      XVECEXP (body, 3, ninputs - ninout + i)      /* argvec */
	= output_rtx[j];
      XVECEXP (body, 4, ninputs - ninout + i)      /* constraints */
	= gen_rtx_ASM_INPUT (inout_mode[j], match[j]);
    }

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx_SET (VOIDmode,
			   output_rtx[i],
			   gen_rtx_ASM_OPERANDS (VOIDmode,
						 TREE_STRING_POINTER (string),
						 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
						 i, argvec, constraints,
						 filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx_CLOBBER (VOIDmode,
				       gen_rtx_MEM (BLKmode,
						    gen_rtx_SCRATCH (VOIDmode)));
		  continue;
		}

	      /* Ignore unknown register, error already signaled.  */
	      continue;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}

/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) construct.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }

  /* If EXP is of function type and we are expanding statements for
     value, convert it to pointer-to-function.  */
  if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
    exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);

  last_expr_type = TREE_TYPE (exp);
  if (flag_syntax_only && ! expr_stmts_for_value)
    last_expr_value = 0;
  else
    last_expr_value = expand_expr (exp,
				   (expr_stmts_for_value
				    ? NULL_RTX : const0_rtx),
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
	;
      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      NULL_RTX, VOIDmode, 0),
			 BLKmode, 0,
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}

/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case SAVE_EXPR:
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
	return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  There can be an arbitrary number of
	 casts before the modify, so we must loop until we find the first
	 non-cast expression and then test to see if that is a modify.  */
      {
	tree tem = TREE_OPERAND (exp, 0);

	while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
	  tem = TREE_OPERAND (tem, 0);

	if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
	    || TREE_CODE (tem) == CALL_EXPR)
	  return 0;
      }
      goto warn;

    case INDIRECT_REF:
      /* Don't warn about automatic dereferencing of references, since
	 the user cannot control it.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
	return warn_if_unused_value (TREE_OPERAND (exp, 0));
      /* ... fall through ...  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
	  && TREE_THIS_VOLATILE (exp))
	return 0;
    warn:
      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
      return 1;
    }
}

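/* For instance (a sketch): compiled with -Wunused, a statement such as

       (x++, y + 1);

   has side effects overall, so expand_expr_stmt hands it to this
   function; the COMPOUND_EXPR case then recurses into `y + 1', which
   falls into the default case and draws "value computed is not used".
   A cast such as `(void) (y + 1)' would be silenced by the
   NOP_EXPR/CONVERT_EXPR handling above.  */
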
/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}

/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

tree
expand_start_stmt_expr ()
{
  int momentary;
  tree t;

  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  momentary = suspend_momentary ();
  t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  do_pending_stack_adjust ();
  start_sequence_for_rtl_expr (t);
  NO_DEFER_POP;
  expr_stmts_for_value++;
  return t;
}

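/* Usage sketch (not from the original source): a front end expanding the
   GNU C statement expression

       v = ({ f (); 23; });

   brackets the inner statements like this:

       tree t = expand_start_stmt_expr ();
       ... expand_expr_stmt on each statement; the last one, `23',
	   leaves its value in last_expr_value ...
       t = expand_end_stmt_expr (t);

   and T, an RTL_EXPR, thereafter stands for the value of the whole
   construct.  */
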
1707 /* Restore the previous state at the end of a statement that returns a value.
1708 Returns a tree node representing the statement's value and the
1709 insns to compute the value.
1711 The nodes of that expression have been freed by now, so we cannot use them.
1712 But we don't want to do that anyway; the expression has already been
1713 evaluated and now we just want to use the value. So generate a RTL_EXPR
1714 with the proper type and RTL value.
1716 If the last substatement was not an expression,
1717 return something with type `void'. */
1719 tree
1720 expand_end_stmt_expr (t)
1721 tree t;
1723 OK_DEFER_POP;
1725 if (last_expr_type == 0)
1727 last_expr_type = void_type_node;
1728 last_expr_value = const0_rtx;
1730 else if (last_expr_value == 0)
1731 /* There are some cases where this can happen, such as when the
1732 statement is void type. */
1733 last_expr_value = const0_rtx;
1734 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1735 /* Remove any possible QUEUED. */
1736 last_expr_value = protect_from_queue (last_expr_value, 0);
1738 emit_queue ();
1740 TREE_TYPE (t) = last_expr_type;
1741 RTL_EXPR_RTL (t) = last_expr_value;
1742 RTL_EXPR_SEQUENCE (t) = get_insns ();
1744 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1746 end_sequence ();
1748 /* Don't consider deleting this expr or containing exprs at tree level. */
1749 TREE_SIDE_EFFECTS (t) = 1;
1750 /* Propagate volatility of the actual RTL expr. */
1751 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1753 last_expr_type = 0;
1754 expr_stmts_for_value--;
1756 return t;
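/* A minimal usage sketch (hypothetical caller code) for the two
   routines above, as a front end might use them for GNU C's
   `({ ... })' construct:

	tree t = expand_start_stmt_expr ();
	... expand each statement of the group, e.g. via expand_expr_stmt ...
	t = expand_end_stmt_expr (t);

   The RTL_EXPR handed back carries the type and value of the last
   expr-stmt, or type `void' if there was none. */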
1759 /* Generate RTL for the start of an if-then. COND is the expression
1760 whose truth should be tested.
1762 If EXITFLAG is nonzero, this conditional is visible to
1763 `exit_something'. */
1765 void
1766 expand_start_cond (cond, exitflag)
1767 tree cond;
1768 int exitflag;
1770 struct nesting *thiscond = ALLOC_NESTING ();
1772 /* Make an entry on cond_stack for the cond we are entering. */
1774 thiscond->next = cond_stack;
1775 thiscond->all = nesting_stack;
1776 thiscond->depth = ++nesting_depth;
1777 thiscond->data.cond.next_label = gen_label_rtx ();
1778 /* Before we encounter an `else', we don't need a separate exit label
1779 unless there are supposed to be exit statements
1780 to exit this conditional. */
1781 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1782 thiscond->data.cond.endif_label = thiscond->exit_label;
1783 cond_stack = thiscond;
1784 nesting_stack = thiscond;
1786 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1789 /* Generate RTL between the then-clause and the elseif-clause
1790 of an if-then-elseif-.... */
1792 void
1793 expand_start_elseif (cond)
1794 tree cond;
1796 if (cond_stack->data.cond.endif_label == 0)
1797 cond_stack->data.cond.endif_label = gen_label_rtx ();
1798 emit_jump (cond_stack->data.cond.endif_label);
1799 emit_label (cond_stack->data.cond.next_label);
1800 cond_stack->data.cond.next_label = gen_label_rtx ();
1801 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1804 /* Generate RTL between the then-clause and the else-clause
1805 of an if-then-else. */
1807 void
1808 expand_start_else ()
1810 if (cond_stack->data.cond.endif_label == 0)
1811 cond_stack->data.cond.endif_label = gen_label_rtx ();
1813 emit_jump (cond_stack->data.cond.endif_label);
1814 emit_label (cond_stack->data.cond.next_label);
1815 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1818 /* After calling expand_start_else, turn this "else" into an "else if"
1819 by providing another condition. */
1821 void
1822 expand_elseif (cond)
1823 tree cond;
1825 cond_stack->data.cond.next_label = gen_label_rtx ();
1826 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1829 /* Generate RTL for the end of an if-then.
1830 Pop the record for it off of cond_stack. */
1832 void
1833 expand_end_cond ()
1835 struct nesting *thiscond = cond_stack;
1837 do_pending_stack_adjust ();
1838 if (thiscond->data.cond.next_label)
1839 emit_label (thiscond->data.cond.next_label);
1840 if (thiscond->data.cond.endif_label)
1841 emit_label (thiscond->data.cond.endif_label);
1843 POPSTACK (cond_stack);
1844 last_expr_type = 0;
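/* Usage sketch (hypothetical caller code) for the cond routines above:
   to expand `if (c1) s1; else if (c2) s2; else s3;' a parser might
   call

	expand_start_cond (c1, 0);
	... expand s1 ...
	expand_start_elseif (c2);
	... expand s2 ...
	expand_start_else ();
	... expand s3 ...
	expand_end_cond ();
 */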
1849 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1850 loop should be exited by `exit_something'. This is a loop for which
1851 `expand_continue' will jump to the top of the loop.
1853 Make an entry on loop_stack to record the labels associated with
1854 this loop. */
1856 struct nesting *
1857 expand_start_loop (exit_flag)
1858 int exit_flag;
1860 register struct nesting *thisloop = ALLOC_NESTING ();
1862 /* Make an entry on loop_stack for the loop we are entering. */
1864 thisloop->next = loop_stack;
1865 thisloop->all = nesting_stack;
1866 thisloop->depth = ++nesting_depth;
1867 thisloop->data.loop.start_label = gen_label_rtx ();
1868 thisloop->data.loop.end_label = gen_label_rtx ();
1869 thisloop->data.loop.alt_end_label = 0;
1870 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
1871 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
1872 loop_stack = thisloop;
1873 nesting_stack = thisloop;
1875 do_pending_stack_adjust ();
1876 emit_queue ();
1877 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
1878 emit_label (thisloop->data.loop.start_label);
1880 return thisloop;
1883 /* Like expand_start_loop but for a loop where the continuation point
1884 (for expand_continue_loop) will be specified explicitly. */
1886 struct nesting *
1887 expand_start_loop_continue_elsewhere (exit_flag)
1888 int exit_flag;
1890 struct nesting *thisloop = expand_start_loop (exit_flag);
1891 loop_stack->data.loop.continue_label = gen_label_rtx ();
1892 return thisloop;
1895 /* Specify the continuation point for a loop started with
1896 expand_start_loop_continue_elsewhere.
1897 Use this at the point in the code to which a continue statement
1898 should jump. */
1900 void
1901 expand_loop_continue_here ()
1903 do_pending_stack_adjust ();
1904 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
1905 emit_label (loop_stack->data.loop.continue_label);
1908 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
1909 Pop the block off of loop_stack. */
1911 void
1912 expand_end_loop ()
1914 rtx start_label = loop_stack->data.loop.start_label;
1915 rtx insn = get_last_insn ();
1917 /* Mark the continue-point at the top of the loop if none elsewhere. */
1918 if (start_label == loop_stack->data.loop.continue_label)
1919 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
1921 do_pending_stack_adjust ();
1923 /* If optimizing, perhaps reorder the loop. If the loop starts with
1924 a loop exit, roll that to the end where it will optimize together
1925 with the jump back.
1927 We look for the conditional branch to the exit, except that once
1928 we find such a branch, we don't look past 30 instructions.
1930 In more detail, if the loop presently looks like this (in pseudo-C):
1932 start_label:
1933 if (test) goto end_label;
1934 body;
1935 goto start_label;
1936 end_label:
1938 transform it to look like:
1940 goto start_label;
1941 newstart_label:
1942 body;
1943 start_label:
1944 if (test) goto end_label;
1945 goto newstart_label;
1946 end_label:
1948 Here, the `test' may actually consist of some reasonably complex
1949 code, terminating in a test. */
1951 if (optimize
1952 &&
1953 ! (GET_CODE (insn) == JUMP_INSN
1954 && GET_CODE (PATTERN (insn)) == SET
1955 && SET_DEST (PATTERN (insn)) == pc_rtx
1956 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
1958 int eh_regions = 0;
1959 int num_insns = 0;
1960 rtx last_test_insn = NULL_RTX;
1962 /* Scan insns from the top of the loop looking for a qualified
1963 conditional exit. */
1964 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
1965 insn = NEXT_INSN (insn))
1967 if (GET_CODE (insn) == NOTE)
1969 if (optimize < 2
1970 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1971 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1972 /* The code that actually moves the exit test will
1973 carefully leave BLOCK notes in their original
1974 location. That means, however, that we can't debug
1975 the exit test itself. So, we refuse to move code
1976 containing BLOCK notes at low optimization levels. */
1977 break;
1979 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
1980 ++eh_regions;
1981 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
1983 --eh_regions;
1984 if (eh_regions < 0)
1985 /* We've come to the end of an EH region, but
1986 never saw the beginning of that region. That
1987 means that an EH region begins before the top
1988 of the loop, and ends in the middle of it. The
1989 existence of such a situation violates a basic
1990 assumption in this code, since that would imply
1991 that even when EH_REGIONS is zero, we might
1992 move code out of an exception region. */
1993 abort ();
1996 /* We already know this INSN is a NOTE, so there's no
1997 point in looking at it to see if it's a JUMP. */
1998 continue;
2001 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2002 num_insns++;
2004 if (last_test_insn && num_insns > 30)
2005 break;
2007 if (eh_regions > 0)
2008 /* We don't want to move a partial EH region. Consider:
2010 while ( ( { try {
2011 if (cond ()) 0;
2012 else {
2013 bar();
2014 1;
2015 }
2016 } catch (...) {
2017 1;
2018 } )) {
2019 body;
2020 }
2022 This isn't legal C++, but here's what it's supposed to
2023 mean: if cond() is true, stop looping. Otherwise,
2024 call bar, and keep looping. In addition, if cond
2025 throws an exception, catch it and keep looping. Such
2026 constructs are certainly legal in LISP.
2028 We should not move the `if (cond()) 0' test since then
2029 the EH-region for the try-block would be broken up.
2030 (In this case we would move the EH_BEG note for the `try'
2031 and `if cond()' but not the call to bar() or the
2032 EH_END note.)
2034 So we don't look for tests within an EH region. */
2035 continue;
2037 if (GET_CODE (insn) == JUMP_INSN
2038 && GET_CODE (PATTERN (insn)) == SET
2039 && SET_DEST (PATTERN (insn)) == pc_rtx)
2041 /* This is indeed a jump. */
2042 rtx dest1 = NULL_RTX;
2043 rtx dest2 = NULL_RTX;
2044 rtx potential_last_test;
2045 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2047 /* A conditional jump. */
2048 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2049 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2050 potential_last_test = insn;
2052 else
2054 /* An unconditional jump. */
2055 dest1 = SET_SRC (PATTERN (insn));
2056 /* Include the BARRIER after the JUMP. */
2057 potential_last_test = NEXT_INSN (insn);
2060 do {
2061 if (dest1 && GET_CODE (dest1) == LABEL_REF
2062 && ((XEXP (dest1, 0)
2063 == loop_stack->data.loop.alt_end_label)
2064 || (XEXP (dest1, 0)
2065 == loop_stack->data.loop.end_label)))
2067 last_test_insn = potential_last_test;
2068 break;
2071 /* If this was a conditional jump, there may be
2072 another label at which we should look. */
2073 dest1 = dest2;
2074 dest2 = NULL_RTX;
2075 } while (dest1);
2079 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2081 /* We found one. Move everything from there up
2082 to the end of the loop, and add a jump into the loop
2083 to jump to there. */
2084 register rtx newstart_label = gen_label_rtx ();
2085 register rtx start_move = start_label;
2086 rtx next_insn;
2088 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2089 then we want to move this note also. */
2090 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2091 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2092 == NOTE_INSN_LOOP_CONT))
2093 start_move = PREV_INSN (start_move);
2095 emit_label_after (newstart_label, PREV_INSN (start_move));
2097 /* Actually move the insns. Start at the beginning, and
2098 keep copying insns until we've copied the
2099 last_test_insn. */
2100 for (insn = start_move; insn; insn = next_insn)
2102 /* Figure out which insn comes after this one. We have
2103 to do this before we move INSN. */
2104 if (insn == last_test_insn)
2105 /* We've moved all the insns. */
2106 next_insn = NULL_RTX;
2107 else
2108 next_insn = NEXT_INSN (insn);
2110 if (GET_CODE (insn) == NOTE
2111 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2112 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2113 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2114 NOTE_INSN_BLOCK_ENDs because the correct generation
2115 of debugging information depends on these appearing
2116 in the same order in the RTL and in the tree
2117 structure, where they are represented as BLOCKs.
2118 So, we don't move block notes. Of course, moving
2119 the code inside the block is likely to make it
2120 impossible to debug the instructions in the exit
2121 test, but such is the price of optimization. */
2122 continue;
2124 /* Move the INSN. */
2125 reorder_insns (insn, insn, get_last_insn ());
2128 emit_jump_insn_after (gen_jump (start_label),
2129 PREV_INSN (newstart_label));
2130 emit_barrier_after (PREV_INSN (newstart_label));
2131 start_label = newstart_label;
2135 emit_jump (start_label);
2136 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2137 emit_label (loop_stack->data.loop.end_label);
2139 POPSTACK (loop_stack);
2141 last_expr_type = 0;
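/* Usage sketch (hypothetical caller code): a C `while (cond) body'
   can be expanded with the loop routines above together with
   expand_exit_loop_if_false, defined below:

	struct nesting *loop = expand_start_loop (1);
	expand_exit_loop_if_false (loop, cond);
	... expand body; a `continue' calls expand_continue_loop ...
	expand_end_loop ();

   For a `for' statement, where `continue' must reach the increment
   rather than the top, use expand_start_loop_continue_elsewhere and
   mark the increment with expand_loop_continue_here. */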
2144 /* Generate a jump to the current loop's continue-point.
2145 This is usually the top of the loop, but may be specified
2146 explicitly elsewhere. If not currently inside a loop,
2147 return 0 and do nothing; caller will print an error message. */
2149 int
2150 expand_continue_loop (whichloop)
2151 struct nesting *whichloop;
2153 last_expr_type = 0;
2154 if (whichloop == 0)
2155 whichloop = loop_stack;
2156 if (whichloop == 0)
2157 return 0;
2158 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2159 NULL_RTX);
2160 return 1;
2163 /* Generate a jump to exit the current loop. If not currently inside a loop,
2164 return 0 and do nothing; caller will print an error message. */
2166 int
2167 expand_exit_loop (whichloop)
2168 struct nesting *whichloop;
2170 last_expr_type = 0;
2171 if (whichloop == 0)
2172 whichloop = loop_stack;
2173 if (whichloop == 0)
2174 return 0;
2175 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2176 return 1;
2179 /* Generate a conditional jump to exit the current loop if COND
2180 evaluates to zero. If not currently inside a loop,
2181 return 0 and do nothing; caller will print an error message. */
2183 int
2184 expand_exit_loop_if_false (whichloop, cond)
2185 struct nesting *whichloop;
2186 tree cond;
2188 rtx label = gen_label_rtx ();
2189 rtx last_insn;
2190 last_expr_type = 0;
2192 if (whichloop == 0)
2193 whichloop = loop_stack;
2194 if (whichloop == 0)
2195 return 0;
2196 /* In order to handle fixups, we actually create a conditional jump
2197 around an unconditional branch to exit the loop. If fixups are
2198 necessary, they go before the unconditional branch. */
2201 do_jump (cond, NULL_RTX, label);
2202 last_insn = get_last_insn ();
2203 if (GET_CODE (last_insn) == CODE_LABEL)
2204 whichloop->data.loop.alt_end_label = last_insn;
2205 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2206 NULL_RTX);
2207 emit_label (label);
2209 return 1;
2212 /* Return non-zero if we should preserve sub-expressions as separate
2213 pseudos. We never do so if we aren't optimizing. We always do so
2214 if -fexpensive-optimizations.
2216 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2217 the loop may still be a small one. */
2219 int
2220 preserve_subexpressions_p ()
2222 rtx insn;
2224 if (flag_expensive_optimizations)
2225 return 1;
2227 if (optimize == 0 || loop_stack == 0)
2228 return 0;
2230 insn = get_last_insn_anywhere ();
2232 return (insn
2233 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2234 < n_non_fixed_regs * 3));
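/* So, for example, with n_non_fixed_regs == 32 the test above answers
   "yes" only within roughly the first 96 insn UIDs after the loop's
   start label; a cheap proxy for the loop still being a small one. */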
2238 /* Generate a jump to exit the current loop, conditional, binding contour
2239 or case statement. Not all such constructs are visible to this function,
2240 only those started with EXIT_FLAG nonzero. Individual languages use
2241 the EXIT_FLAG parameter to control which kinds of constructs you can
2242 exit this way.
2244 If not currently inside anything that can be exited,
2245 return 0 and do nothing; caller will print an error message. */
2247 int
2248 expand_exit_something ()
2250 struct nesting *n;
2251 last_expr_type = 0;
2252 for (n = nesting_stack; n; n = n->all)
2253 if (n->exit_label != 0)
2255 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2256 return 1;
2259 return 0;
2262 /* Generate RTL to return from the current function, with no value.
2263 (That is, we do not do anything about returning any value.) */
2265 void
2266 expand_null_return ()
2268 struct nesting *block = block_stack;
2269 rtx last_insn = 0;
2271 /* Does any pending block have cleanups? */
2273 while (block && block->data.block.cleanups == 0)
2274 block = block->next;
2276 /* If yes, use a goto to return, since that runs cleanups. */
2278 expand_null_return_1 (last_insn, block != 0);
2281 /* Generate RTL to return from the current function, with value VAL. */
2283 static void
2284 expand_value_return (val)
2285 rtx val;
2287 struct nesting *block = block_stack;
2288 rtx last_insn = get_last_insn ();
2289 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2291 /* Copy the value to the return location
2292 unless it's already there. */
2294 if (return_reg != val)
2296 #ifdef PROMOTE_FUNCTION_RETURN
2297 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2298 int unsignedp = TREE_UNSIGNED (type);
2299 enum machine_mode mode
2300 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2301 &unsignedp, 1);
2303 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2304 convert_move (return_reg, val, unsignedp);
2305 else
2306 #endif
2307 emit_move_insn (return_reg, val);
2309 if (GET_CODE (return_reg) == REG
2310 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2311 emit_insn (gen_rtx_USE (VOIDmode, return_reg));
2312 /* Handle calls that return values in multiple non-contiguous locations.
2313 The Irix 6 ABI has examples of this. */
2314 else if (GET_CODE (return_reg) == PARALLEL)
2316 int i;
2318 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2320 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2322 if (GET_CODE (x) == REG
2323 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2324 emit_insn (gen_rtx_USE (VOIDmode, x));
2328 /* Does any pending block have cleanups? */
2330 while (block && block->data.block.cleanups == 0)
2331 block = block->next;
2333 /* If yes, use a goto to return, since that runs cleanups.
2334 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2336 expand_null_return_1 (last_insn, block != 0);
2339 /* Output a return with no value. If LAST_INSN is nonzero,
2340 pretend that the return takes place after LAST_INSN.
2341 If USE_GOTO is nonzero then don't use a return instruction;
2342 go to the return label instead. This causes any cleanups
2343 of pending blocks to be executed normally. */
2345 static void
2346 expand_null_return_1 (last_insn, use_goto)
2347 rtx last_insn;
2348 int use_goto;
2350 rtx end_label = cleanup_label ? cleanup_label : return_label;
2352 clear_pending_stack_adjust ();
2353 do_pending_stack_adjust ();
2354 last_expr_type = 0;
2356 /* PCC-struct return always uses an epilogue. */
2357 if (current_function_returns_pcc_struct || use_goto)
2359 if (end_label == 0)
2360 end_label = return_label = gen_label_rtx ();
2361 expand_goto_internal (NULL_TREE, end_label, last_insn);
2362 return;
2365 /* Otherwise output a simple return-insn if one is available,
2366 unless it won't do the job. */
2367 #ifdef HAVE_return
2368 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2370 emit_jump_insn (gen_return ());
2371 emit_barrier ();
2372 return;
2374 #endif
2376 /* Otherwise jump to the epilogue. */
2377 expand_goto_internal (NULL_TREE, end_label, last_insn);
2380 /* Generate RTL to evaluate the expression RETVAL and return it
2381 from the current function. */
2383 void
2384 expand_return (retval)
2385 tree retval;
2387 /* If there are any cleanups to be performed, then they will
2388 be inserted following LAST_INSN. It is desirable
2389 that the last_insn, for such purposes, should be the
2390 last insn before computing the return value. Otherwise, cleanups
2391 which call functions can clobber the return value. */
2392 /* ??? rms: I think that is erroneous, because in C++ it would
2393 run destructors on variables that might be used in the subsequent
2394 computation of the return value. */
2395 rtx last_insn = 0;
2396 register rtx val = 0;
2397 register rtx op0;
2398 tree retval_rhs;
2399 int cleanups;
2401 /* If function wants no value, give it none. */
2402 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2404 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2405 emit_queue ();
2406 expand_null_return ();
2407 return;
2410 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2411 /* This is not sufficient. We also need to watch for cleanups of the
2412 expression we are about to expand. Unfortunately, we cannot know
2413 if it has cleanups until we expand it, and we want to change how we
2414 expand it depending upon whether we need cleanups. We can't win. */
2415 #if 0
2416 cleanups = any_pending_cleanups (1);
2417 #else
2418 cleanups = 1;
2419 #endif
2421 if (TREE_CODE (retval) == RESULT_DECL)
2422 retval_rhs = retval;
2423 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2424 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2425 retval_rhs = TREE_OPERAND (retval, 1);
2426 else if (TREE_TYPE (retval) == void_type_node)
2427 /* Recognize tail-recursive call to void function. */
2428 retval_rhs = retval;
2429 else
2430 retval_rhs = NULL_TREE;
2432 /* Only use `last_insn' if there are cleanups which must be run. */
2433 if (cleanups || cleanup_label != 0)
2434 last_insn = get_last_insn ();
2436 /* Distribute return down conditional expr if either of the sides
2437 may involve tail recursion (see test below). This enhances the number
2438 of tail recursions we see. Don't do this always since it can produce
2439 sub-optimal code in some cases and we distribute assignments into
2440 conditional expressions when it would help. */
2442 if (optimize && retval_rhs != 0
2443 && frame_offset == 0
2444 && TREE_CODE (retval_rhs) == COND_EXPR
2445 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2446 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2448 rtx label = gen_label_rtx ();
2449 tree expr;
2451 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2452 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2453 DECL_RESULT (current_function_decl),
2454 TREE_OPERAND (retval_rhs, 1));
2455 TREE_SIDE_EFFECTS (expr) = 1;
2456 expand_return (expr);
2457 emit_label (label);
2459 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2460 DECL_RESULT (current_function_decl),
2461 TREE_OPERAND (retval_rhs, 2));
2462 TREE_SIDE_EFFECTS (expr) = 1;
2463 expand_return (expr);
2464 return;
2467 /* For tail-recursive call to current function,
2468 just jump back to the beginning.
2469 It's unsafe if any auto variable in this function
2470 has its address taken; for simplicity,
2471 require stack frame to be empty. */
2472 if (optimize && retval_rhs != 0
2473 && frame_offset == 0
2474 && TREE_CODE (retval_rhs) == CALL_EXPR
2475 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2476 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2477 /* Finish checking validity, and if valid emit code
2478 to set the argument variables for the new call. */
2479 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2480 DECL_ARGUMENTS (current_function_decl)))
2482 if (tail_recursion_label == 0)
2484 tail_recursion_label = gen_label_rtx ();
2485 emit_label_after (tail_recursion_label,
2486 tail_recursion_reentry);
2488 emit_queue ();
2489 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2490 emit_barrier ();
2491 return;
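/* For example (a sketch, assuming optimization is on and the frame is
   empty): in

	int sum (n, acc)
	     int n, acc;
	{
	  if (n == 0)
	    return acc;
	  return sum (n - 1, acc + n);
	}

   the second return passes the tests above; tail_recursion_args
   stores n - 1 and acc + n into the formals, and we jump back to
   tail_recursion_label instead of emitting a real call. */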
2493 #ifdef HAVE_return
2494 /* This optimization is safe if there are local cleanups
2495 because expand_null_return takes care of them.
2496 ??? I think it should also be safe when there is a cleanup label,
2497 because expand_null_return takes care of them, too.
2498 Any reason why not? */
2499 if (HAVE_return && cleanup_label == 0
2500 && ! current_function_returns_pcc_struct
2501 && BRANCH_COST <= 1)
2503 /* If this is return x == y; then generate
2504 if (x == y) return 1; else return 0;
2505 if we can do it with explicit return insns and branches are cheap,
2506 but not if we have the corresponding scc insn. */
2507 int has_scc = 0;
2508 if (retval_rhs)
2509 switch (TREE_CODE (retval_rhs))
2511 case EQ_EXPR:
2512 #ifdef HAVE_seq
2513 has_scc = HAVE_seq;
2514 #endif
2515 case NE_EXPR:
2516 #ifdef HAVE_sne
2517 has_scc = HAVE_sne;
2518 #endif
2519 case GT_EXPR:
2520 #ifdef HAVE_sgt
2521 has_scc = HAVE_sgt;
2522 #endif
2523 case GE_EXPR:
2524 #ifdef HAVE_sge
2525 has_scc = HAVE_sge;
2526 #endif
2527 case LT_EXPR:
2528 #ifdef HAVE_slt
2529 has_scc = HAVE_slt;
2530 #endif
2531 case LE_EXPR:
2532 #ifdef HAVE_sle
2533 has_scc = HAVE_sle;
2534 #endif
2535 case TRUTH_ANDIF_EXPR:
2536 case TRUTH_ORIF_EXPR:
2537 case TRUTH_AND_EXPR:
2538 case TRUTH_OR_EXPR:
2539 case TRUTH_NOT_EXPR:
2540 case TRUTH_XOR_EXPR:
2541 if (! has_scc)
2543 op0 = gen_label_rtx ();
2544 jumpifnot (retval_rhs, op0);
2545 expand_value_return (const1_rtx);
2546 emit_label (op0);
2547 expand_value_return (const0_rtx);
2548 return;
2550 break;
2552 default:
2553 break;
2556 #endif /* HAVE_return */
2558 /* If the result is an aggregate that is being returned in one (or more)
2559 registers, load the registers here. The compiler currently can't handle
2560 copying a BLKmode value into registers. We could put this code in a
2561 more general area (for use by everyone instead of just function
2562 call/return), but until this feature is generally usable it is kept here
2563 (and in expand_call). The value must go into a pseudo in case there
2564 are cleanups that will clobber the real return register. */
2566 if (retval_rhs != 0
2567 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2568 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2570 int i, bitpos, xbitpos;
2571 int big_endian_correction = 0;
2572 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2573 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2574 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),BITS_PER_WORD);
2575 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2576 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2577 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2578 enum machine_mode tmpmode, result_reg_mode;
2580 /* Structures whose size is not a multiple of a word are aligned
2581 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2582 machine, this means we must skip the empty high order bytes when
2583 calculating the bit offset. */
2584 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2585 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2586 * BITS_PER_UNIT));
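/* Worked example (a sketch): on a 32-bit big-endian target with
   UNITS_PER_WORD == 4, a 6-byte structure gives bytes % UNITS_PER_WORD
   == 2, so big_endian_correction = 32 - 2 * 8 = 16; the destination
   offsets below start 16 bits in, skipping the two empty high-order
   bytes of the first word. */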
2588 /* Copy the structure BITSIZE bits at a time. */
2589 for (bitpos = 0, xbitpos = big_endian_correction;
2590 bitpos < bytes * BITS_PER_UNIT;
2591 bitpos += bitsize, xbitpos += bitsize)
2593 /* We need a new destination pseudo each time xbitpos is
2594 on a word boundary and when xbitpos == big_endian_correction
2595 (the first time through). */
2596 if (xbitpos % BITS_PER_WORD == 0
2597 || xbitpos == big_endian_correction)
2599 /* Generate an appropriate register. */
2600 dst = gen_reg_rtx (word_mode);
2601 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2603 /* Clobber the destination before we move anything into it. */
2604 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
2607 /* We need a new source operand each time bitpos is on a word
2608 boundary. */
2609 if (bitpos % BITS_PER_WORD == 0)
2610 src = operand_subword_force (result_val,
2611 bitpos / BITS_PER_WORD,
2612 BLKmode);
2614 /* Use bitpos for the source extraction (left justified) and
2615 xbitpos for the destination store (right justified). */
2616 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2617 extract_bit_field (src, bitsize,
2618 bitpos % BITS_PER_WORD, 1,
2619 NULL_RTX, word_mode,
2620 word_mode,
2621 bitsize / BITS_PER_UNIT,
2622 BITS_PER_WORD),
2623 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2626 /* Find the smallest integer mode large enough to hold the
2627 entire structure and use that mode instead of BLKmode
2628 on the USE insn for the return register. */
2629 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2630 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2631 tmpmode != MAX_MACHINE_MODE;
2632 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2634 /* Have we found a large enough mode? */
2635 if (GET_MODE_SIZE (tmpmode) >= bytes)
2636 break;
2639 /* No suitable mode found. */
2640 if (tmpmode == MAX_MACHINE_MODE)
2641 abort ();
2643 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2645 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2646 result_reg_mode = word_mode;
2647 else
2648 result_reg_mode = tmpmode;
2649 result_reg = gen_reg_rtx (result_reg_mode);
2651 emit_queue ();
2652 for (i = 0; i < n_regs; i++)
2653 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2654 result_pseudos[i]);
2656 if (tmpmode != result_reg_mode)
2657 result_reg = gen_lowpart (tmpmode, result_reg);
2659 expand_value_return (result_reg);
2661 else if (cleanups
2662 && retval_rhs != 0
2663 && TREE_TYPE (retval_rhs) != void_type_node
2664 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2666 /* Calculate the return value into a pseudo reg. */
2667 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
2668 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2669 val = force_not_mem (val);
2670 emit_queue ();
2671 /* Return the calculated value, doing cleanups first. */
2672 expand_value_return (val);
2674 else
2676 /* No cleanups or no hard reg used;
2677 calculate value into hard return reg. */
2678 expand_expr (retval, const0_rtx, VOIDmode, 0);
2679 emit_queue ();
2680 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2684 /* Return 1 if the end of the generated RTX is not a barrier.
2685 This means code already compiled can drop through. */
2687 int
2688 drop_through_at_end_p ()
2690 rtx insn = get_last_insn ();
2691 while (insn && GET_CODE (insn) == NOTE)
2692 insn = PREV_INSN (insn);
2693 return insn && GET_CODE (insn) != BARRIER;
2696 /* Emit code to alter this function's formal parms for a tail-recursive call.
2697 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2698 FORMALS is the chain of decls of formals.
2699 Return 1 if this can be done;
2700 otherwise return 0 and do not emit any code. */
2702 static int
2703 tail_recursion_args (actuals, formals)
2704 tree actuals, formals;
2706 register tree a = actuals, f = formals;
2707 register int i;
2708 register rtx *argvec;
2710 /* Check that number and types of actuals are compatible
2711 with the formals. This is not always true in valid C code.
2712 Also check that no formal needs to be addressable
2713 and that all formals are scalars. */
2715 /* Also count the args. */
2717 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2719 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2720 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
2721 return 0;
2722 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2723 return 0;
2725 if (a != 0 || f != 0)
2726 return 0;
2728 /* Compute all the actuals. */
2730 argvec = (rtx *) alloca (i * sizeof (rtx));
2732 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2733 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2735 /* Find which actual values refer to current values of previous formals.
2736 Copy each of them now, before any formal is changed. */
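/* E.g. (a sketch): with formals (x, y) and the tail call f (y, x),
   argvec[1] holds the rtx of formal x; storing y into x first would
   clobber it, so it is copied to a fresh pseudo below. */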
2738 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2740 int copy = 0;
2741 register int j;
2742 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2743 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2744 { copy = 1; break; }
2745 if (copy)
2746 argvec[i] = copy_to_reg (argvec[i]);
2749 /* Store the values of the actuals into the formals. */
2751 for (f = formals, a = actuals, i = 0; f;
2752 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2754 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2755 emit_move_insn (DECL_RTL (f), argvec[i]);
2756 else
2757 convert_move (DECL_RTL (f), argvec[i],
2758 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2761 free_temp_slots ();
2762 return 1;
2765 /* Generate the RTL code for entering a binding contour.
2766 The variables are declared one by one, by calls to `expand_decl'.
2768 EXIT_FLAG is nonzero if this construct should be visible to
2769 `exit_something'. */
2771 void
2772 expand_start_bindings (exit_flag)
2773 int exit_flag;
2775 struct nesting *thisblock = ALLOC_NESTING ();
2776 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2778 /* Make an entry on block_stack for the block we are entering. */
2780 thisblock->next = block_stack;
2781 thisblock->all = nesting_stack;
2782 thisblock->depth = ++nesting_depth;
2783 thisblock->data.block.stack_level = 0;
2784 thisblock->data.block.cleanups = 0;
2785 thisblock->data.block.function_call_count = 0;
2786 thisblock->data.block.exception_region = 0;
2787 thisblock->data.block.target_temp_slot_level = target_temp_slot_level;
2789 thisblock->data.block.conditional_code = 0;
2790 thisblock->data.block.last_unconditional_cleanup = note;
2791 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
2793 if (block_stack
2794 && !(block_stack->data.block.cleanups == NULL_TREE
2795 && block_stack->data.block.outer_cleanups == NULL_TREE))
2796 thisblock->data.block.outer_cleanups
2797 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2798 block_stack->data.block.outer_cleanups);
2799 else
2800 thisblock->data.block.outer_cleanups = 0;
2801 thisblock->data.block.label_chain = 0;
2802 thisblock->data.block.innermost_stack_block = stack_block_stack;
2803 thisblock->data.block.first_insn = note;
2804 thisblock->data.block.block_start_count = ++block_start_count;
2805 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2806 block_stack = thisblock;
2807 nesting_stack = thisblock;
2809 /* Make a new level for allocating stack slots. */
2810 push_temp_slots ();
2813 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2814 to CLEANUP_POINT_EXPR, but handles cases where a series of calls to
2815 expand_expr is made. After we end the region, we know that all
2816 space for all temporaries that were created by TARGET_EXPRs will be
2817 destroyed and their space freed for reuse. */
2819 void
2820 expand_start_target_temps ()
2822 /* This is so that even if the result is preserved, the space
2823 allocated will be freed, as we know that it is no longer in use. */
2824 push_temp_slots ();
2826 /* Start a new binding layer that will keep track of all cleanup
2827 actions to be performed. */
2828 expand_start_bindings (0);
2830 target_temp_slot_level = temp_slot_level;
2833 void
2834 expand_end_target_temps ()
2836 expand_end_bindings (NULL_TREE, 0, 0);
2838 /* This is so that even if the result is preserved, the space
2839 allocated will be freed, as we know that it is no longer in use. */
2840 pop_temp_slots ();
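/* Usage sketch (hypothetical caller code): a front end brackets each
   full-expression with this pair so that TARGET_EXPR temporaries die
   at its end:

	expand_start_target_temps ();
	expand_expr_stmt (expr);
	expand_end_target_temps ();
 */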
2843 /* Mark top block of block_stack as an implicit binding for an
2844 exception region. This is used to prevent infinite recursion when
2845 ending a binding with expand_end_bindings. It is only ever called
2846 by expand_eh_region_start, as that is the only way to create a
2847 block stack for an exception region. */
2849 void
2850 mark_block_as_eh_region ()
2852 block_stack->data.block.exception_region = 1;
2853 if (block_stack->next
2854 && block_stack->next->data.block.conditional_code)
2856 block_stack->data.block.conditional_code
2857 = block_stack->next->data.block.conditional_code;
2858 block_stack->data.block.last_unconditional_cleanup
2859 = block_stack->next->data.block.last_unconditional_cleanup;
2860 block_stack->data.block.cleanup_ptr
2861 = block_stack->next->data.block.cleanup_ptr;
2865 /* True if we are currently emitting insns in an area of output code
2866 that is controlled by a conditional expression. This is used by
2867 the cleanup handling code to generate conditional cleanup actions. */
2869 int
2870 conditional_context ()
2872 return block_stack && block_stack->data.block.conditional_code;
2875 /* Mark top block of block_stack as not for an implicit binding for an
2876 exception region. This is only ever done by expand_eh_region_end
2877 to let expand_end_bindings know that it is being called explicitly
2878 to end just the binding layer associated with
2879 the exception region; otherwise expand_end_bindings would try to
2880 end all implicit binding layers for exception regions, and then
2881 one normal binding layer. */
2883 void
2884 mark_block_as_not_eh_region ()
2886 block_stack->data.block.exception_region = 0;
2889 /* True if the top block of block_stack was marked as for an exception
2890 region by mark_block_as_eh_region. */
2892 int
2893 is_eh_region ()
2895 return block_stack && block_stack->data.block.exception_region;
2898 /* Given a pointer to a BLOCK node, save a pointer to the most recently
2899 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
2900 BLOCK node. */
2902 void
2903 remember_end_note (block)
2904 register tree block;
2906 BLOCK_END_NOTE (block) = last_block_end_note;
2907 last_block_end_note = NULL_RTX;
2910 /* Generate RTL code to terminate a binding contour.
2911 VARS is the chain of VAR_DECL nodes
2912 for the variables bound in this contour.
2913 MARK_ENDS is nonzero if we should put a note at the beginning
2914 and end of this binding contour.
2916 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2917 (That is true automatically if the contour has a saved stack level.) */
2919 void
2920 expand_end_bindings (vars, mark_ends, dont_jump_in)
2921 tree vars;
2922 int mark_ends;
2923 int dont_jump_in;
2925 register struct nesting *thisblock;
2926 register tree decl;
2928 while (block_stack->data.block.exception_region)
2930 /* Because we don't need or want a new temporary level and
2931 because we didn't create one in expand_eh_region_start,
2932 create a fake one now to avoid removing one in
2933 expand_end_bindings. */
2934 push_temp_slots ();
2936 block_stack->data.block.exception_region = 0;
2938 expand_end_bindings (NULL_TREE, 0, 0);
2941 /* Since expand_eh_region_start does an expand_start_bindings, we
2942 have to first end all the bindings that were created by
2943 expand_eh_region_start. */
2945 thisblock = block_stack;
2947 if (warn_unused)
2948 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2949 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
2950 && ! DECL_IN_SYSTEM_HEADER (decl)
2951 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
2952 warning_with_decl (decl, "unused variable `%s'");
2954 if (thisblock->exit_label)
2956 do_pending_stack_adjust ();
2957 emit_label (thisblock->exit_label);
2960 /* If necessary, make a handler for nonlocal gotos taking
2961 place in the function calls in this block. */
2962 if (function_call_count != thisblock->data.block.function_call_count
2963 && nonlocal_labels
2964 /* Make handler for outermost block
2965 if there were any nonlocal gotos to this function. */
2966 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2967 /* Make handler for inner block if it has something
2968 special to do when you jump out of it. */
2969 : (thisblock->data.block.cleanups != 0
2970 || thisblock->data.block.stack_level != 0)))
2972 tree link;
2973 rtx afterward = gen_label_rtx ();
2974 rtx handler_label = gen_label_rtx ();
2975 rtx save_receiver = gen_reg_rtx (Pmode);
2976 rtx insns;
2978 /* Don't let jump_optimize delete the handler. */
2979 LABEL_PRESERVE_P (handler_label) = 1;
2981 /* Record the handler address in the stack slot for that purpose,
2982 during this block, saving and restoring the outer value. */
2983 if (thisblock->next != 0)
2985 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2987 start_sequence ();
2988 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
2989 insns = get_insns ();
2990 end_sequence ();
2991 emit_insns_before (insns, thisblock->data.block.first_insn);
2994 start_sequence ();
2995 emit_move_insn (nonlocal_goto_handler_slot,
2996 gen_rtx_LABEL_REF (Pmode, handler_label));
2997 insns = get_insns ();
2998 end_sequence ();
2999 emit_insns_before (insns, thisblock->data.block.first_insn);
3001 /* Jump around the handler; it runs only when specially invoked. */
3002 emit_jump (afterward);
3003 emit_label (handler_label);
3005 #ifdef HAVE_nonlocal_goto
3006 if (! HAVE_nonlocal_goto)
3007 #endif
3008 /* First adjust our frame pointer to its actual value. It was
3009 previously set to the start of the virtual area corresponding to
3010 the stacked variables when we branched here and now needs to be
3011 adjusted to the actual hardware fp value.
3013 Assignments to virtual registers are converted by
3014 instantiate_virtual_regs into the corresponding assignment
3015 to the underlying register (fp in this case) that makes
3016 the original assignment true.
3017 So the following insn will actually be
3018 decrementing fp by STARTING_FRAME_OFFSET. */
3019 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3021 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3022 if (fixed_regs[ARG_POINTER_REGNUM])
3024 #ifdef ELIMINABLE_REGS
3025 /* If the argument pointer can be eliminated in favor of the
3026 frame pointer, we don't need to restore it. We assume here
3027 that if such an elimination is present, it can always be used.
3028 This is the case on all known machines; if we don't make this
3029 assumption, we do unnecessary saving on many machines. */
3030 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3031 size_t i;
3033 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3034 if (elim_regs[i].from == ARG_POINTER_REGNUM
3035 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3036 break;
3038 if (i == sizeof elim_regs / sizeof elim_regs [0])
3039 #endif
3041 /* Now restore our arg pointer from the address at which it
3042 was saved in our stack frame.
3043 If there hasn't been space allocated for it yet, make
3044 some now. */
3045 if (arg_pointer_save_area == 0)
3046 arg_pointer_save_area
3047 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3048 emit_move_insn (virtual_incoming_args_rtx,
3049 /* We need a pseudo here, or else
3050 instantiate_virtual_regs_1 complains. */
3051 copy_to_reg (arg_pointer_save_area));
3054 #endif
3056 #ifdef HAVE_nonlocal_goto_receiver
3057 if (HAVE_nonlocal_goto_receiver)
3058 emit_insn (gen_nonlocal_goto_receiver ());
3059 #endif
3061 /* The handler expects the desired label address in the static chain
3062 register. It tests the address and does an appropriate jump
3063 to whatever label is desired. */
3064 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
3065 /* Skip any labels we shouldn't be able to jump to from here. */
3066 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3068 rtx not_this = gen_label_rtx ();
3069 rtx this = gen_label_rtx ();
3070 do_jump_if_equal (static_chain_rtx,
3071 gen_rtx_LABEL_REF (Pmode, DECL_RTL (TREE_VALUE (link))),
3072 this, 0);
3073 emit_jump (not_this);
3074 emit_label (this);
3075 expand_goto (TREE_VALUE (link));
3076 emit_label (not_this);
3078 /* If label is not recognized, abort. */
3079 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3080 VOIDmode, 0);
3081 emit_barrier ();
3082 emit_label (afterward);
3085 /* Don't allow jumping into a block that has a stack level.
3086 Cleanups are allowed, though. */
3087 if (dont_jump_in
3088 || thisblock->data.block.stack_level != 0)
3090 struct label_chain *chain;
3092 /* Any labels in this block are no longer valid to go to.
3093 Mark them to cause an error message. */
3094 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3096 DECL_TOO_LATE (chain->label) = 1;
3097 /* If any goto without a fixup came to this label,
3098 that must be an error, because gotos without fixups
3099 come from outside all saved stack-levels. */
3100 if (TREE_ADDRESSABLE (chain->label))
3101 error_with_decl (chain->label,
3102 "label `%s' used before containing binding contour");
3106 /* Restore stack level in effect before the block
3107 (only if variable-size objects allocated). */
3108 /* Perform any cleanups associated with the block. */
3110 if (thisblock->data.block.stack_level != 0
3111 || thisblock->data.block.cleanups != 0)
3113 /* Only clean up here if this point can actually be reached. */
3114 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3116 /* Don't let cleanups affect ({...}) constructs. */
3117 int old_expr_stmts_for_value = expr_stmts_for_value;
3118 rtx old_last_expr_value = last_expr_value;
3119 tree old_last_expr_type = last_expr_type;
3120 expr_stmts_for_value = 0;
3122 /* Do the cleanups. */
3123 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3124 if (reachable)
3125 do_pending_stack_adjust ();
3127 expr_stmts_for_value = old_expr_stmts_for_value;
3128 last_expr_value = old_last_expr_value;
3129 last_expr_type = old_last_expr_type;
3131 /* Restore the stack level. */
3133 if (reachable && thisblock->data.block.stack_level != 0)
3135 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3136 thisblock->data.block.stack_level, NULL_RTX);
3137 if (nonlocal_goto_handler_slot != 0)
3138 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3139 NULL_RTX);
3142 /* Any gotos out of this block must also do these things.
3143 Also report any gotos with fixups that came to labels in this
3144 level. */
3145 fixup_gotos (thisblock,
3146 thisblock->data.block.stack_level,
3147 thisblock->data.block.cleanups,
3148 thisblock->data.block.first_insn,
3149 dont_jump_in);
3152 /* Mark the beginning and end of the scope if requested.
3153 We do this now, after running cleanups on the variables
3154 just going out of scope, so they are in scope for their cleanups. */
3156 if (mark_ends)
3157 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3158 else
3159 /* Get rid of the beginning-mark if we don't make an end-mark. */
3160 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3162 /* If doing stupid register allocation, make sure lives of all
3163 register variables declared here extend thru end of scope. */
3165 if (obey_regdecls)
3166 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3168 rtx rtl = DECL_RTL (decl);
3169 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3170 use_variable (rtl);
3173 /* Restore the temporary level of TARGET_EXPRs. */
3174 target_temp_slot_level = thisblock->data.block.target_temp_slot_level;
3176 /* Restore block_stack level for containing block. */
3178 stack_block_stack = thisblock->data.block.innermost_stack_block;
3179 POPSTACK (block_stack);
3181 /* Pop the stack slot nesting and free any slots at this level. */
3182 pop_temp_slots ();
3187 /* Generate RTL for the automatic variable declaration DECL.
3188 (Other kinds of declarations are simply ignored if seen here.) */
3190 void
3191 expand_decl (decl)
3192 register tree decl;
3194 struct nesting *thisblock = block_stack;
3195 tree type;
3197 type = TREE_TYPE (decl);
3199 /* Only automatic variables need any expansion done.
3200 Static and external variables, and external functions,
3201 will be handled by `assemble_variable' (called from finish_decl).
3202 TYPE_DECL and CONST_DECL require nothing.
3203 PARM_DECLs are handled in `assign_parms'. */
3205 if (TREE_CODE (decl) != VAR_DECL)
3206 return;
3207 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3208 return;
3210 /* Create the RTL representation for the variable. */
3212 if (type == error_mark_node)
3213 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3214 else if (DECL_SIZE (decl) == 0)
3215 /* Variable with incomplete type. */
3217 if (DECL_INITIAL (decl) == 0)
3218 /* Error message was already done; now avoid a crash. */
3219 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3220 else
3221 /* An initializer is going to decide the size of this array.
3222 Until we know the size, represent its address with a reg. */
3223 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3224 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
3226 else if (DECL_MODE (decl) != BLKmode
3227 /* If -ffloat-store, don't put explicit float vars
3228 into regs. */
3229 && !(flag_float_store
3230 && TREE_CODE (type) == REAL_TYPE)
3231 && ! TREE_THIS_VOLATILE (decl)
3232 && ! TREE_ADDRESSABLE (decl)
3233 && (DECL_REGISTER (decl) || ! obey_regdecls)
3234 /* if -fcheck-memory-usage, check all variables. */
3235 && ! flag_check_memory_usage)
3237 /* Automatic variable that can go in a register. */
3238 int unsignedp = TREE_UNSIGNED (type);
3239 enum machine_mode reg_mode
3240 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3242 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3243 mark_user_reg (DECL_RTL (decl));
3245 if (POINTER_TYPE_P (type))
3246 mark_reg_pointer (DECL_RTL (decl),
3247 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3248 / BITS_PER_UNIT));
3251 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3252 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3253 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3254 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3255 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
3257 /* Variable of fixed size that goes on the stack. */
3258 rtx oldaddr = 0;
3259 rtx addr;
3261 /* If we previously made RTL for this decl, it must be an array
3262 whose size was determined by the initializer.
3263 The old address was a register; set that register now
3264 to the proper address. */
3265 if (DECL_RTL (decl) != 0)
3267 if (GET_CODE (DECL_RTL (decl)) != MEM
3268 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3269 abort ();
3270 oldaddr = XEXP (DECL_RTL (decl), 0);
3273 DECL_RTL (decl)
3274 = assign_stack_temp (DECL_MODE (decl),
3275 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3276 + BITS_PER_UNIT - 1)
3277 / BITS_PER_UNIT),
3279 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3281 /* Set alignment we actually gave this decl. */
3282 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3283 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3285 if (oldaddr)
3287 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3288 if (addr != oldaddr)
3289 emit_move_insn (oldaddr, addr);
3292 /* If this is a memory ref that contains aggregate components,
3293 mark it as such for cse and loop optimize. */
3294 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3295 #if 0
3296 /* If this is in memory because of -ffloat-store,
3297 set the volatile bit, to prevent optimizations from
3298 undoing the effects. */
3299 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3300 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3301 #endif
3303 MEM_ALIAS_SET (DECL_RTL (decl)) = get_alias_set (decl);
3305 else
3306 /* Dynamic-size object: must push space on the stack. */
3308 rtx address, size;
3310 /* Record the stack pointer on entry to block, if have
3311 not already done so. */
3312 if (thisblock->data.block.stack_level == 0)
3314 do_pending_stack_adjust ();
3315 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3316 &thisblock->data.block.stack_level,
3317 thisblock->data.block.first_insn);
3318 stack_block_stack = thisblock;
3321 /* Compute the variable's size, in bytes. */
3322 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3323 DECL_SIZE (decl),
3324 size_int (BITS_PER_UNIT)),
3325 NULL_RTX, VOIDmode, 0);
3326 free_temp_slots ();
3328 /* Allocate space on the stack for the variable. Note that
3329 DECL_ALIGN says how the variable is to be aligned and we
3330 cannot use it to conclude anything about the alignment of
3331 the size. */
3332 address = allocate_dynamic_stack_space (size, NULL_RTX,
3333 TYPE_ALIGN (TREE_TYPE (decl)));
3335 /* Reference the variable indirectly through that rtx. */
3336 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3338 /* If this is a memory ref that contains aggregate components,
3339 mark it as such for cse and loop optimize. */
3340 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3342 /* Indicate the alignment we actually gave this variable. */
3343 #ifdef STACK_BOUNDARY
3344 DECL_ALIGN (decl) = STACK_BOUNDARY;
3345 #else
3346 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3347 #endif
3350 if (TREE_THIS_VOLATILE (decl))
3351 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3352 #if 0 /* A variable is not necessarily unchanging
3353 just because it is const. RTX_UNCHANGING_P
3354 means no change in the function,
3355 not merely no change in the variable's scope.
3356 It is correct to set RTX_UNCHANGING_P if the variable's scope
3357 is the whole function. There's no convenient way to test that. */
3358 if (TREE_READONLY (decl))
3359 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3360 #endif
3362 /* If doing stupid register allocation, make sure life of any
3363 register variable starts here, at the start of its scope. */
3365 if (obey_regdecls)
3366 use_variable (DECL_RTL (decl));
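/* Summary by example (a sketch): `register int i;' normally takes the
   pseudo-register branch above (gen_reg_rtx); `int a[10];' takes the
   fixed-size branch (assign_stack_temp); and `char buf[n];' with
   non-constant n takes the dynamic branch, which saves the block's
   stack level and calls allocate_dynamic_stack_space. */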
3371 /* Emit code to perform the initialization of a declaration DECL. */
3373 void
3374 expand_decl_init (decl)
3375 tree decl;
3377 int was_used = TREE_USED (decl);
3379 /* If this is a CONST_DECL, we don't have to generate any code, but
3380 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3381 to be set while in the obstack containing the constant. If we don't
3382 do this, we can lose if we have functions nested three deep and the middle
3383 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3384 the innermost function is the first to expand that STRING_CST. */
3385 if (TREE_CODE (decl) == CONST_DECL)
3387 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3388 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3389 EXPAND_INITIALIZER);
3390 return;
3393 if (TREE_STATIC (decl))
3394 return;
3396 /* Compute and store the initial value now. */
3398 if (DECL_INITIAL (decl) == error_mark_node)
3400 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3402 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3403 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3404 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3405 0, 0);
3406 emit_queue ();
3408 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3410 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3411 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3412 emit_queue ();
3415 /* Don't let the initialization count as "using" the variable. */
3416 TREE_USED (decl) = was_used;
3418 /* Free any temporaries we made while initializing the decl. */
3419 preserve_temp_slots (NULL_RTX);
3420 free_temp_slots ();
3423 /* CLEANUP is an expression to be executed at exit from this binding contour;
3424 for example, in C++, it might call the destructor for this variable.
3426 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3427 CLEANUP multiple times and still have the correct semantics. This
3428 happens in exception handling, and for gotos, returns, and breaks that
3429 leave the current scope.
3431 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3432 that is not associated with any particular variable. */
3434 int
3435 expand_decl_cleanup (decl, cleanup)
3436 tree decl, cleanup;
3438 struct nesting *thisblock = block_stack;
3440 /* Error if we are not in any block. */
3441 if (thisblock == 0)
3442 return 0;
3444 /* Record the cleanup if there is one. */
3446 if (cleanup != 0)
3448 tree t;
3449 rtx seq;
3450 tree *cleanups = &thisblock->data.block.cleanups;
3451 int cond_context = conditional_context ();
3453 if (cond_context)
3455 rtx flag = gen_reg_rtx (word_mode);
3456 rtx set_flag_0;
3457 tree cond;
3459 start_sequence ();
3460 emit_move_insn (flag, const0_rtx);
3461 set_flag_0 = get_insns ();
3462 end_sequence ();
3464 thisblock->data.block.last_unconditional_cleanup
3465 = emit_insns_after (set_flag_0,
3466 thisblock->data.block.last_unconditional_cleanup);
3468 emit_move_insn (flag, const1_rtx);
3470 /* All cleanups must be on the function_obstack. */
3471 push_obstacks_nochange ();
3472 resume_temporary_allocation ();
3474 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3475 DECL_RTL (cond) = flag;
3477 /* Conditionalize the cleanup. */
3478 cleanup = build (COND_EXPR, void_type_node,
3479 truthvalue_conversion (cond),
3480 cleanup, integer_zero_node);
3481 cleanup = fold (cleanup);
3483 pop_obstacks ();
3485 cleanups = thisblock->data.block.cleanup_ptr;
3488 /* All cleanups must be on the function_obstack. */
3489 push_obstacks_nochange ();
3490 resume_temporary_allocation ();
3491 cleanup = unsave_expr (cleanup);
3492 pop_obstacks ();
3494 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
3496 if (! cond_context)
3497 /* If this block has a cleanup, it belongs in stack_block_stack. */
3498 stack_block_stack = thisblock;
3500 if (cond_context)
3502 start_sequence ();
3505 /* If this was optimized so that there is no exception region for the
3506 cleanup, then mark the TREE_LIST node, so that we can later tell
3507 if we need to call expand_eh_region_end. */
3508 if (! using_eh_for_cleanups_p
3509 || expand_eh_region_start_tree (decl, cleanup))
3510 TREE_ADDRESSABLE (t) = 1;
3511 /* If that started a new EH region, we're in a new block. */
3512 thisblock = block_stack;
3514 if (cond_context)
3516 seq = get_insns ();
3517 end_sequence ();
3518 if (seq)
3519 thisblock->data.block.last_unconditional_cleanup
3520 = emit_insns_after (seq,
3521 thisblock->data.block.last_unconditional_cleanup);
3523 else
3525 thisblock->data.block.last_unconditional_cleanup
3526 = get_last_insn ();
3527 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3530 return 1;
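/* Illustrative sketch only (kept out of compilation): a front end
   declaring a variable whose type has a destructor might register the
   destructor call as a cleanup right after expanding the declaration.
   `get_current_decl' and `build_destructor_call' are invented names
   for this example, not functions of this file.  */
#if 0
{
  tree decl = get_current_decl ();		/* hypothetical */
  tree cleanup = build_destructor_call (decl);	/* hypothetical */

  expand_decl (decl);
  expand_decl_init (decl);
  if (! expand_decl_cleanup (decl, cleanup))
    error ("cleanup needed, but not inside a binding contour");
}
#endif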
3533 /* Like expand_decl_cleanup, but suppress generating an exception handler
3534 to perform the cleanup. */
3536 int
3537 expand_decl_cleanup_no_eh (decl, cleanup)
3538 tree decl, cleanup;
3540 int save_eh = using_eh_for_cleanups_p;
3541 int result;
3543 using_eh_for_cleanups_p = 0;
3544 result = expand_decl_cleanup (decl, cleanup);
3545 using_eh_for_cleanups_p = save_eh;
3547 return result;
3550 /* Arrange for the top element of the dynamic cleanup chain to be
3551 popped if we exit the current binding contour. DECL is the
3552 associated declaration, if any, otherwise NULL_TREE. If the
3553 current contour is left via an exception, then __sjthrow will pop
3554 the top element off the dynamic cleanup chain. The code that
3555 avoids doing the action we push into the cleanup chain in the
3556 exceptional case is contained in expand_cleanups.
3558 This routine is only used by expand_eh_region_start, and that is
3559 the only way in which an exception region should be started. This
3560 routine is only used when using the setjmp/longjmp codegen method
3561 for exception handling. */
3563 int
3564 expand_dcc_cleanup (decl)
3565 tree decl;
3567 struct nesting *thisblock = block_stack;
3568 tree cleanup;
3570 /* Error if we are not in any block. */
3571 if (thisblock == 0)
3572 return 0;
3574 /* Record the cleanup for the dynamic handler chain. */
3576 /* All cleanups must be on the function_obstack. */
3577 push_obstacks_nochange ();
3578 resume_temporary_allocation ();
3579 cleanup = make_node (POPDCC_EXPR);
3580 pop_obstacks ();
3582 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3583 thisblock->data.block.cleanups
3584 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3586 /* If this block has a cleanup, it belongs in stack_block_stack. */
3587 stack_block_stack = thisblock;
3588 return 1;
3591 /* Arrange for the top element of the dynamic handler chain to be
3592 popped if we exit the current binding contour. DECL is the
3593 associated declaration, if any, otherwise NULL_TREE. If the current
3594 contour is left via an exception, then __sjthrow will pop the top
3595 element off the dynamic handler chain. The code that avoids doing
3596 the action we push into the handler chain in the exceptional case
3597 is contained in expand_cleanups.
3599 This routine is only used by expand_eh_region_start, and that is
3600 the only way in which an exception region should be started. This
3601 routine is only used when using the setjmp/longjmp codegen method
3602 for exception handling. */
3604 int
3605 expand_dhc_cleanup (decl)
3606 tree decl;
3608 struct nesting *thisblock = block_stack;
3609 tree cleanup;
3611 /* Error if we are not in any block. */
3612 if (thisblock == 0)
3613 return 0;
3615 /* Record the cleanup for the dynamic handler chain. */
3617 /* All cleanups must be on the function_obstack. */
3618 push_obstacks_nochange ();
3619 resume_temporary_allocation ();
3620 cleanup = make_node (POPDHC_EXPR);
3621 pop_obstacks ();
3623 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3624 thisblock->data.block.cleanups
3625 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3627 /* If this block has a cleanup, it belongs in stack_block_stack. */
3628 stack_block_stack = thisblock;
3629 return 1;
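/* Background sketch (standalone, not part of the compiler): the
   setjmp/longjmp scheme referred to above keeps a chain of handlers;
   a throw pops the top element and longjmps to it, much as __sjthrow
   does for the regions set up here.  A minimal model:  */
#if 0
#include <setjmp.h>
#include <stdio.h>

struct handler { jmp_buf buf; struct handler *next; };
static struct handler *chain;		/* the dynamic handler chain */

static void
sj_throw ()
{
  struct handler *h = chain;
  chain = h->next;			/* pop, as __sjthrow would */
  longjmp (h->buf, 1);
}

int
main ()
{
  struct handler h;
  h.next = chain;
  chain = &h;				/* push: an exception region begins */
  if (setjmp (h.buf) == 0)
    sj_throw ();			/* leave the contour exceptionally */
  else
    printf ("handler ran; chain already popped\n");
  return 0;
}
#endif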
3632 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3633 DECL_ELTS is the list of elements that belong to DECL's type.
3634 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3636 void
3637 expand_anon_union_decl (decl, cleanup, decl_elts)
3638 tree decl, cleanup, decl_elts;
3640 struct nesting *thisblock = block_stack;
3641 rtx x;
3643 expand_decl (decl);
3644 expand_decl_cleanup (decl, cleanup);
3645 x = DECL_RTL (decl);
3647 while (decl_elts)
3649 tree decl_elt = TREE_VALUE (decl_elts);
3650 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3651 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3653 /* Propagate the union's alignment to the elements. */
3654 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3656 /* If the element has BLKmode and the union doesn't, the union is
3657 aligned such that the element doesn't need to have BLKmode, so
3658 change the element's mode to the appropriate one for its size. */
3659 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3660 DECL_MODE (decl_elt) = mode
3661 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3662 MODE_INT, 1);
3664 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3665 instead create a new MEM rtx with the proper mode. */
3666 if (GET_CODE (x) == MEM)
3668 if (mode == GET_MODE (x))
3669 DECL_RTL (decl_elt) = x;
3670 else
3672 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
3673 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3674 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3677 else if (GET_CODE (x) == REG)
3679 if (mode == GET_MODE (x))
3680 DECL_RTL (decl_elt) = x;
3681 else
3682 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
3684 else
3685 abort ();
3687 /* Record the cleanup if there is one. */
3689 if (cleanup != 0)
3690 thisblock->data.block.cleanups
3691 = temp_tree_cons (decl_elt, cleanup_elt,
3692 thisblock->data.block.cleanups);
3694 decl_elts = TREE_CHAIN (decl_elts);
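/* For example, given the C extension
       union { int i; float f; };
   as a local declaration, the union might be assigned a stack slot
   (a MEM); the code above then gives `i' and `f' MEMs at the same
   address but in their own modes -- typically SImode and SFmode on a
   32-bit target -- while a union held in a register gets SUBREGs
   instead.  */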
3698 /* Expand a list of cleanups LIST.
3699 Elements may be expressions or may be nested lists.
3701 If DONT_DO is nonnull, then any list-element
3702 whose TREE_PURPOSE matches DONT_DO is omitted.
3703 This is sometimes used to avoid a cleanup associated with
3704 a value that is being returned out of the scope.
3706 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
3707 goto and handle protection regions specially in that case.
3709 If REACHABLE, we emit code, otherwise just inform the exception handling
3710 code about this finalization. */
3712 static void
3713 expand_cleanups (list, dont_do, in_fixup, reachable)
3714 tree list;
3715 tree dont_do;
3716 int in_fixup;
3717 int reachable;
3719 tree tail;
3720 for (tail = list; tail; tail = TREE_CHAIN (tail))
3721 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3723 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3724 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
3725 else
3727 if (! in_fixup)
3729 tree cleanup = TREE_VALUE (tail);
3731 /* See expand_d{h,c}c_cleanup for why we avoid this. */
3732 if (TREE_CODE (cleanup) != POPDHC_EXPR
3733 && TREE_CODE (cleanup) != POPDCC_EXPR
3734 /* See expand_eh_region_start_tree for this case. */
3735 && ! TREE_ADDRESSABLE (tail))
3737 cleanup = protect_with_terminate (cleanup);
3738 expand_eh_region_end (cleanup);
3742 if (reachable)
3744 /* Cleanups may be run multiple times. For example,
3745 when exiting a binding contour, we expand the
3746 cleanups associated with that contour. When a goto
3747 within that binding contour has a target outside that
3748 contour, it will expand all cleanups from its scope to
3749 the target. Though the cleanups are expanded multiple
3750 times, the control paths are non-overlapping so the
3751 cleanups will not be executed twice. */
3753 /* We may need to protect fixups with rethrow regions. */
3754 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
3756 if (protect)
3757 expand_fixup_region_start ();
3759 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3760 if (protect)
3761 expand_fixup_region_end (TREE_VALUE (tail));
3762 free_temp_slots ();
3768 /* Mark the context in which we are emitting RTL as a conditional
3769 context, so that any cleanup actions we register with
3770 expand_decl_init will be properly conditionalized when those
3771 cleanup actions are later performed. Must be called before any
3772 expression (tree) is expanded that is within a conditional context. */
3774 void
3775 start_cleanup_deferral ()
3777 /* block_stack can be NULL if we are inside the parameter list. It is
3778 OK to do nothing, because cleanups aren't possible here. */
3779 if (block_stack)
3780 ++block_stack->data.block.conditional_code;
3783 /* Mark the end of a conditional region of code. Because cleanup
3784 deferrals may be nested, we may still be in a conditional region
3785 after we end the currently deferred cleanups; only after we end all
3786 deferred cleanups are we back in unconditional code. */
3788 void
3789 end_cleanup_deferral ()
3791 /* block_stack can be NULL if we are inside the parameter list. It is
3792 OK to do nothing, because cleanups aren't possible here. */
3793 if (block_stack)
3794 --block_stack->data.block.conditional_code;
3797 /* Move all cleanups from the current block_stack
3798 to the containing block_stack, where they are assumed to
3799 have been created. If anything can cause a temporary to
3800 be created, but not expanded for more than one level of
3801 block_stacks, then this code will have to change. */
3803 void
3804 move_cleanups_up ()
3806 struct nesting *block = block_stack;
3807 struct nesting *outer = block->next;
3809 outer->data.block.cleanups
3810 = chainon (block->data.block.cleanups,
3811 outer->data.block.cleanups);
3812 block->data.block.cleanups = 0;
3815 tree
3816 last_cleanup_this_contour ()
3818 if (block_stack == 0)
3819 return 0;
3821 return block_stack->data.block.cleanups;
3824 /* Return 1 if there are any pending cleanups at this point.
3825 If THIS_CONTOUR is nonzero, check the current contour as well.
3826 Otherwise, look only at the contours that enclose this one. */
3828 int
3829 any_pending_cleanups (this_contour)
3830 int this_contour;
3832 struct nesting *block;
3834 if (block_stack == 0)
3835 return 0;
3837 if (this_contour && block_stack->data.block.cleanups != NULL)
3838 return 1;
3839 if (block_stack->data.block.cleanups == 0
3840 && block_stack->data.block.outer_cleanups == 0)
3841 return 0;
3843 for (block = block_stack->next; block; block = block->next)
3844 if (block->data.block.cleanups != 0)
3845 return 1;
3847 return 0;
3850 /* Enter a case (Pascal) or switch (C) statement.
3851 Push a block onto case_stack and nesting_stack
3852 to accumulate the case-labels that are seen
3853 and to record the labels generated for the statement.
3855 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3856 Otherwise, this construct is transparent for `exit_something'.
3858 EXPR is the index-expression to be dispatched on.
3859 TYPE is its nominal type. We could simply convert EXPR to this type,
3860 but instead we take short cuts. */
3862 void
3863 expand_start_case (exit_flag, expr, type, printname)
3864 int exit_flag;
3865 tree expr;
3866 tree type;
3867 char *printname;
3869 register struct nesting *thiscase = ALLOC_NESTING ();
3871 /* Make an entry on case_stack for the case we are entering. */
3873 thiscase->next = case_stack;
3874 thiscase->all = nesting_stack;
3875 thiscase->depth = ++nesting_depth;
3876 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3877 thiscase->data.case_stmt.case_list = 0;
3878 thiscase->data.case_stmt.index_expr = expr;
3879 thiscase->data.case_stmt.nominal_type = type;
3880 thiscase->data.case_stmt.default_label = 0;
3881 thiscase->data.case_stmt.num_ranges = 0;
3882 thiscase->data.case_stmt.printname = printname;
3883 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
3884 case_stack = thiscase;
3885 nesting_stack = thiscase;
3887 do_pending_stack_adjust ();
3889 /* Make sure case_stmt.start points to something that won't
3890 need any transformation before expand_end_case. */
3891 if (GET_CODE (get_last_insn ()) != NOTE)
3892 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3894 thiscase->data.case_stmt.start = get_last_insn ();
3896 start_cleanup_deferral ();
3900 /* Start a "dummy case statement" within which case labels are invalid
3901 and are not connected to any larger real case statement.
3902 This can be used if you don't want to let a case statement jump
3903 into the middle of certain kinds of constructs. */
3905 void
3906 expand_start_case_dummy ()
3908 register struct nesting *thiscase = ALLOC_NESTING ();
3910 /* Make an entry on case_stack for the dummy. */
3912 thiscase->next = case_stack;
3913 thiscase->all = nesting_stack;
3914 thiscase->depth = ++nesting_depth;
3915 thiscase->exit_label = 0;
3916 thiscase->data.case_stmt.case_list = 0;
3917 thiscase->data.case_stmt.start = 0;
3918 thiscase->data.case_stmt.nominal_type = 0;
3919 thiscase->data.case_stmt.default_label = 0;
3920 thiscase->data.case_stmt.num_ranges = 0;
3921 case_stack = thiscase;
3922 nesting_stack = thiscase;
3923 start_cleanup_deferral ();
3926 /* End a dummy case statement. */
3928 void
3929 expand_end_case_dummy ()
3931 end_cleanup_deferral ();
3932 POPSTACK (case_stack);
3935 /* Return the data type of the index-expression
3936 of the innermost case statement, or null if none. */
3938 tree
3939 case_index_expr_type ()
3941 if (case_stack)
3942 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3943 return 0;
3946 static void
3947 check_seenlabel ()
3949 /* If this is the first label, warn if any insns have been emitted. */
3950 if (case_stack->data.case_stmt.line_number_status >= 0)
3952 rtx insn;
3954 restore_line_number_status
3955 (case_stack->data.case_stmt.line_number_status);
3956 case_stack->data.case_stmt.line_number_status = -1;
3958 for (insn = case_stack->data.case_stmt.start;
3959 insn;
3960 insn = NEXT_INSN (insn))
3962 if (GET_CODE (insn) == CODE_LABEL)
3963 break;
3964 if (GET_CODE (insn) != NOTE
3965 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3968 insn = PREV_INSN (insn);
3969 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
3971 /* If insn is zero, then there must have been a syntax error. */
3972 if (insn)
3973 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
3974 NOTE_LINE_NUMBER (insn),
3975 "unreachable code at beginning of %s",
3976 case_stack->data.case_stmt.printname);
3977 break;
3983 /* Accumulate one case or default label inside a case or switch statement.
3984 VALUE is the value of the case (a null pointer, for a default label).
3985 The function CONVERTER, when applied to arguments T and V,
3986 converts the value V to the type T.
3988 If not currently inside a case or switch statement, return 1 and do
3989 nothing. The caller will print a language-specific error message.
3990 If VALUE is a duplicate or overlaps, return 2 and do nothing
3991 except store the (first) duplicate node in *DUPLICATE.
3992 If VALUE is out of range, return 3 and do nothing.
3993 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3994 Return 0 on success.
3996 Extended to handle range statements. */
3998 int
3999 pushcase (value, converter, label, duplicate)
4000 register tree value;
4001 tree (*converter) PROTO((tree, tree));
4002 register tree label;
4003 tree *duplicate;
4005 tree index_type;
4006 tree nominal_type;
4008 /* Fail if not inside a real case statement. */
4009 if (! (case_stack && case_stack->data.case_stmt.start))
4010 return 1;
4012 if (stack_block_stack
4013 && stack_block_stack->depth > case_stack->depth)
4014 return 5;
4016 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4017 nominal_type = case_stack->data.case_stmt.nominal_type;
4019 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4020 if (index_type == error_mark_node)
4021 return 0;
4023 /* Convert VALUE to the type in which the comparisons are nominally done. */
4024 if (value != 0)
4025 value = (*converter) (nominal_type, value);
4027 check_seenlabel ();
4029 /* Fail if this value is out of range for the actual type of the index
4030 (which may be narrower than NOMINAL_TYPE). */
4031 if (value != 0 && ! int_fits_type_p (value, index_type))
4032 return 3;
4034 /* Fail if this is a duplicate or overlaps another entry. */
4035 if (value == 0)
4037 if (case_stack->data.case_stmt.default_label != 0)
4039 *duplicate = case_stack->data.case_stmt.default_label;
4040 return 2;
4042 case_stack->data.case_stmt.default_label = label;
4044 else
4045 return add_case_node (value, value, label, duplicate);
4047 expand_label (label);
4048 return 0;
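/* Illustrative sketch only (kept out of compilation): a parser action
   for `case N:' might map the return codes documented above to
   diagnostics, roughly as the C front end does.  `case_value' and
   `case_label' are stand-ins and the messages are invented for the
   example; `convert' is the usual tree conversion routine.  */
#if 0
{
  tree duplicate;
  int success = pushcase (case_value, convert, case_label, &duplicate);

  if (success == 1)
    error ("case label not within a switch statement");
  else if (success == 2)
    error ("duplicate case value");
  else if (success == 3)
    error ("case value out of range");
  else if (success == 5)
    error ("case label within scope of cleanup or variable array");
}
#endif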
4051 /* Like pushcase but this case applies to all values between VALUE1 and
4052 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4053 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4054 starts at VALUE1 and ends at the highest value of the index type.
4055 If both are NULL, this case applies to all values.
4057 The return value is the same as that of pushcase but there is one
4058 additional error code: 4 means the specified range was empty. */
4060 int
4061 pushcase_range (value1, value2, converter, label, duplicate)
4062 register tree value1, value2;
4063 tree (*converter) PROTO((tree, tree));
4064 register tree label;
4065 tree *duplicate;
4067 tree index_type;
4068 tree nominal_type;
4070 /* Fail if not inside a real case statement. */
4071 if (! (case_stack && case_stack->data.case_stmt.start))
4072 return 1;
4074 if (stack_block_stack
4075 && stack_block_stack->depth > case_stack->depth)
4076 return 5;
4078 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4079 nominal_type = case_stack->data.case_stmt.nominal_type;
4081 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4082 if (index_type == error_mark_node)
4083 return 0;
4085 check_seenlabel ();
4087 /* Convert VALUEs to type in which the comparisons are nominally done
4088 and replace any unspecified value with the corresponding bound. */
4089 if (value1 == 0)
4090 value1 = TYPE_MIN_VALUE (index_type);
4091 if (value2 == 0)
4092 value2 = TYPE_MAX_VALUE (index_type);
4094 /* Fail if the range is empty. Do this before any conversion since
4095 we want to allow out-of-range empty ranges. */
4096 if (value2 && tree_int_cst_lt (value2, value1))
4097 return 4;
4099 value1 = (*converter) (nominal_type, value1);
4101 /* If the max was unbounded, use the max of the nominal_type we are
4102 converting to. Do this after the < check above to suppress false
4103 positives. */
4104 if (!value2)
4105 value2 = TYPE_MAX_VALUE (nominal_type);
4106 value2 = (*converter) (nominal_type, value2);
4108 /* Fail if these values are out of range. */
4109 if (TREE_CONSTANT_OVERFLOW (value1)
4110 || ! int_fits_type_p (value1, index_type))
4111 return 3;
4113 if (TREE_CONSTANT_OVERFLOW (value2)
4114 || ! int_fits_type_p (value2, index_type))
4115 return 3;
4117 return add_case_node (value1, value2, label, duplicate);
4120 /* Do the actual insertion of a case label for pushcase and pushcase_range
4121 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4122 slowdown for large switch statements. */
4124 static int
4125 add_case_node (low, high, label, duplicate)
4126 tree low, high;
4127 tree label;
4128 tree *duplicate;
4130 struct case_node *p, **q, *r;
4132 q = &case_stack->data.case_stmt.case_list;
4133 p = *q;
4135 while ((r = *q))
4137 p = r;
4139 /* Keep going past elements distinctly greater than HIGH. */
4140 if (tree_int_cst_lt (high, p->low))
4141 q = &p->left;
4143 /* or distinctly less than LOW. */
4144 else if (tree_int_cst_lt (p->high, low))
4145 q = &p->right;
4147 else
4149 /* We have an overlap; this is an error. */
4150 *duplicate = p->code_label;
4151 return 2;
4155 /* Add this label to the chain, and succeed.
4156 Copy LOW, HIGH so they are on temporary rather than momentary
4157 obstack and will thus survive till the end of the case statement. */
4159 r = (struct case_node *) oballoc (sizeof (struct case_node));
4160 r->low = copy_node (low);
4162 /* If the bounds are equal, turn this into the one-value case. */
4164 if (tree_int_cst_equal (low, high))
4165 r->high = r->low;
4166 else
4168 r->high = copy_node (high);
4169 case_stack->data.case_stmt.num_ranges++;
4172 r->code_label = label;
4173 expand_label (label);
4175 *q = r;
4176 r->parent = p;
4177 r->left = 0;
4178 r->right = 0;
4179 r->balance = 0;
4181 while (p)
4183 struct case_node *s;
4185 if (r == p->left)
4187 int b;
4189 if (! (b = p->balance))
4190 /* Growth propagation from left side. */
4191 p->balance = -1;
4192 else if (b < 0)
4194 if (r->balance < 0)
4196 /* R-Rotation */
4197 if ((p->left = s = r->right))
4198 s->parent = p;
4200 r->right = p;
4201 p->balance = 0;
4202 r->balance = 0;
4203 s = p->parent;
4204 p->parent = r;
4206 if ((r->parent = s))
4208 if (s->left == p)
4209 s->left = r;
4210 else
4211 s->right = r;
4213 else
4214 case_stack->data.case_stmt.case_list = r;
4216 else
4217 /* r->balance == +1 */
4219 /* LR-Rotation */
4221 int b2;
4222 struct case_node *t = r->right;
4224 if ((p->left = s = t->right))
4225 s->parent = p;
4227 t->right = p;
4228 if ((r->right = s = t->left))
4229 s->parent = r;
4231 t->left = r;
4232 b = t->balance;
4233 b2 = b < 0;
4234 p->balance = b2;
4235 b2 = -b2 - b;
4236 r->balance = b2;
4237 t->balance = 0;
4238 s = p->parent;
4239 p->parent = t;
4240 r->parent = t;
4242 if ((t->parent = s))
4244 if (s->left == p)
4245 s->left = t;
4246 else
4247 s->right = t;
4249 else
4250 case_stack->data.case_stmt.case_list = t;
4252 break;
4255 else
4257 /* p->balance == +1; growth of left side balances the node. */
4258 p->balance = 0;
4259 break;
4262 else
4263 /* r == p->right */
4265 int b;
4267 if (! (b = p->balance))
4268 /* Growth propagation from right side. */
4269 p->balance++;
4270 else if (b > 0)
4272 if (r->balance > 0)
4274 /* L-Rotation */
4276 if ((p->right = s = r->left))
4277 s->parent = p;
4279 r->left = p;
4280 p->balance = 0;
4281 r->balance = 0;
4282 s = p->parent;
4283 p->parent = r;
4284 if ((r->parent = s))
4286 if (s->left == p)
4287 s->left = r;
4288 else
4289 s->right = r;
4292 else
4293 case_stack->data.case_stmt.case_list = r;
4296 else
4297 /* r->balance == -1 */
4299 /* RL-Rotation */
4300 int b2;
4301 struct case_node *t = r->left;
4303 if ((p->right = s = t->left))
4304 s->parent = p;
4306 t->left = p;
4308 if ((r->left = s = t->right))
4309 s->parent = r;
4311 t->right = r;
4312 b = t->balance;
4313 b2 = b < 0;
4314 r->balance = b2;
4315 b2 = -b2 - b;
4316 p->balance = b2;
4317 t->balance = 0;
4318 s = p->parent;
4319 p->parent = t;
4320 r->parent = t;
4322 if ((t->parent = s))
4324 if (s->left == p)
4325 s->left = t;
4326 else
4327 s->right = t;
4330 else
4331 case_stack->data.case_stmt.case_list = t;
4333 break;
4335 else
4337 /* p->balance == -1; growth of right side balances the node. */
4338 p->balance = 0;
4339 break;
4343 r = p;
4344 p = p->parent;
4347 return 0;
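/* Worked example of the rebalancing above: push the one-value cases
   3, 2, 1 in that order.  Inserting 1 drives node 3's balance to -2
   along its left spine, and the R-rotation promotes 2:

         3                2
        /       ==>      / \
       2                1   3
      /
     1

   leaving every balance field 0 again.  An LR-rotation handles the
   zig-zag shape (e.g. pushing 3, 1, 2), and the right-side cases are
   the mirror images.  */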
4351 /* Returns the number of possible values of TYPE.
4352 Returns -1 if the number is unknown or variable.
4353 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4354 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4355 do not increase monotonically (there may be duplicates);
4356 to 1 if the values increase monotonically, but not always by 1;
4357 otherwise sets it to 0. */
4359 HOST_WIDE_INT
4360 all_cases_count (type, spareness)
4361 tree type;
4362 int *spareness;
4364 HOST_WIDE_INT count;
4365 *spareness = 0;
4367 switch (TREE_CODE (type))
4369 tree t;
4370 case BOOLEAN_TYPE:
4371 count = 2;
4372 break;
4373 case CHAR_TYPE:
4374 count = 1 << BITS_PER_UNIT;
4375 break;
4376 default:
4377 case INTEGER_TYPE:
4378 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4379 || TYPE_MAX_VALUE (type) == NULL
4380 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4381 return -1;
4382 else
4384 /* count
4385 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4386 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4387 but with overflow checking. */
4388 tree mint = TYPE_MIN_VALUE (type);
4389 tree maxt = TYPE_MAX_VALUE (type);
4390 HOST_WIDE_INT lo, hi;
4391 neg_double (TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4392 &lo, &hi);
4393 add_double (TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4394 lo, hi, &lo, &hi);
4395 add_double (lo, hi, 1, 0, &lo, &hi);
4396 if (hi != 0 || lo < 0)
4397 return -2;
4398 count = lo;
4400 break;
4401 case ENUMERAL_TYPE:
4402 count = 0;
4403 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4405 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4406 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4407 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4408 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4409 *spareness = 1;
4410 count++;
4412 if (*spareness == 1)
4414 tree prev = TREE_VALUE (TYPE_VALUES (type));
4415 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4417 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4419 *spareness = 2;
4420 break;
4422 prev = TREE_VALUE (t);
4427 return count;
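/* Standalone sketch of the overflow check above: the span
   max - min + 1 is computed in unsigned arithmetic (here with `long'
   standing in for HOST_WIDE_INT) and rejected when it does not fit in
   a signed word, mirroring the neg_double/add_double sequence.  */
#if 0
#include <limits.h>

static long
span_count (minv, maxv)
     long minv, maxv;
{
  unsigned long span = (unsigned long) maxv - (unsigned long) minv;

  if (span + 1 == 0 || span + 1 > (unsigned long) LONG_MAX)
    return -2;			/* like the hi != 0 || lo < 0 test */
  return (long) (span + 1);
}
#endif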
4431 #define BITARRAY_TEST(ARRAY, INDEX) \
4432 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4433 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4434 #define BITARRAY_SET(ARRAY, INDEX) \
4435 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4436 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
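/* Usage sketch for the two macros above (kept out of compilation),
   with the bitstring sized the way check_for_full_enumeration_handling
   sizes it:  */
#if 0
{
  long size = 100;	/* number of possible selector values */
  long bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
  unsigned char *cases_seen = (unsigned char *) malloc (bytes_needed);

  bzero (cases_seen, bytes_needed);
  BITARRAY_SET (cases_seen, 42);	/* case with offset 42 was seen */
  if (! BITARRAY_TEST (cases_seen, 7))
    ;					/* offset 7 was not seen */
  free (cases_seen);
}
#endif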
4438 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4439 with the case values we have seen, assuming the case expression
4440 has the given TYPE.
4441 SPARSENESS is as determined by all_cases_count.
4443 The time needed is proportional to COUNT, unless
4444 SPARSENESS is 2, in which case quadratic time is needed. */
4446 void
4447 mark_seen_cases (type, cases_seen, count, sparseness)
4448 tree type;
4449 unsigned char *cases_seen;
4450 long count;
4451 int sparseness;
4453 tree next_node_to_try = NULL_TREE;
4454 long next_node_offset = 0;
4456 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4457 tree val = make_node (INTEGER_CST);
4458 TREE_TYPE (val) = type;
4459 if (! root)
4460 ; /* Do nothing */
4461 else if (sparseness == 2)
4463 tree t;
4464 HOST_WIDE_INT xlo;
4466 /* This less efficient loop is only needed to handle
4467 duplicate case values (multiple enum constants
4468 with the same value). */
4469 TREE_TYPE (val) = TREE_TYPE (root->low);
4470 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4471 t = TREE_CHAIN (t), xlo++)
4473 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4474 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4475 n = root;
4478 /* Keep going past elements distinctly greater than VAL. */
4479 if (tree_int_cst_lt (val, n->low))
4480 n = n->left;
4482 /* or distinctly less than VAL. */
4483 else if (tree_int_cst_lt (n->high, val))
4484 n = n->right;
4486 else
4488 /* We have found a matching range. */
4489 BITARRAY_SET (cases_seen, xlo);
4490 break;
4493 while (n);
4496 else
4498 if (root->left)
4499 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4500 for (n = root; n; n = n->right)
4502 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4503 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4504 while ( ! tree_int_cst_lt (n->high, val))
4506 /* Calculate (into xlo) the "offset" of the integer (val).
4507 The element with lowest value has offset 0, the next smallest
4508 element has offset 1, etc. */
4510 HOST_WIDE_INT xlo, xhi;
4511 tree t;
4512 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4514 /* The TYPE_VALUES will be in increasing order, so
4515 start searching where we last ended. */
4516 t = next_node_to_try;
4517 xlo = next_node_offset;
4518 xhi = 0;
4519 for (;;)
4521 if (t == NULL_TREE)
4523 t = TYPE_VALUES (type);
4524 xlo = 0;
4526 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4528 next_node_to_try = TREE_CHAIN (t);
4529 next_node_offset = xlo + 1;
4530 break;
4532 xlo++;
4533 t = TREE_CHAIN (t);
4534 if (t == next_node_to_try)
4536 xlo = -1;
4537 break;
4541 else
4543 t = TYPE_MIN_VALUE (type);
4544 if (t)
4545 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4546 &xlo, &xhi);
4547 else
4548 xlo = xhi = 0;
4549 add_double (xlo, xhi,
4550 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4551 &xlo, &xhi);
4554 if (xhi == 0 && xlo >= 0 && xlo < count)
4555 BITARRAY_SET (cases_seen, xlo);
4556 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4557 1, 0,
4558 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4564 /* Called when the index of a switch statement is an enumerated type
4565 and there is no default label.
4567 Checks that all enumeration literals are covered by the case
4568 expressions of a switch. Also, warn if there are any extra
4569 switch cases that are *not* elements of the enumerated type.
4571 If all enumeration literals were covered by the case expressions,
4572 turn one of the expressions into the default expression since it should
4573 not be possible to fall through such a switch. */
4575 void
4576 check_for_full_enumeration_handling (type)
4577 tree type;
4579 register struct case_node *n;
4580 register tree chain;
4581 #if 0 /* variable used by 'if 0'ed code below. */
4582 register struct case_node **l;
4583 int all_values = 1;
4584 #endif
4586 /* True iff the selector type is a numbered set mode. */
4587 int sparseness = 0;
4589 /* The number of possible selector values. */
4590 HOST_WIDE_INT size;
4592 /* For each possible selector value, a one iff it has been matched
4593 by a case value alternative. */
4594 unsigned char *cases_seen;
4596 /* The allocated size of cases_seen, in chars. */
4597 long bytes_needed;
4599 if (! warn_switch)
4600 return;
4602 size = all_cases_count (type, &sparseness);
4603 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
4605 if (size > 0 && size < 600000
4606 /* We deliberately use malloc here - not xmalloc. */
4607 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
4609 long i;
4610 tree v = TYPE_VALUES (type);
4611 bzero (cases_seen, bytes_needed);
4613 /* The time complexity of this code is normally O(N), where
4614 N is the number of members in the enumerated type.
4615 However, if type is an ENUMERAL_TYPE whose values do not
4616 increase monotonically, O(N*log(N)) time may be needed. */
4618 mark_seen_cases (type, cases_seen, size, sparseness);
4620 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4622 if (BITARRAY_TEST (cases_seen, i) == 0)
4623 warning ("enumeration value `%s' not handled in switch",
4624 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
4627 free (cases_seen);
4630 /* Now we go the other way around; we warn if there are case
4631 expressions that don't correspond to enumerators. This can
4632 occur since C and C++ don't enforce type-checking of
4633 assignments to enumeration variables. */
4635 if (case_stack->data.case_stmt.case_list
4636 && case_stack->data.case_stmt.case_list->left)
4637 case_stack->data.case_stmt.case_list
4638 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
4639 if (warn_switch)
4640 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4642 for (chain = TYPE_VALUES (type);
4643 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4644 chain = TREE_CHAIN (chain))
4647 if (!chain)
4649 if (TYPE_NAME (type) == 0)
4650 warning ("case value `%ld' not in enumerated type",
4651 (long) TREE_INT_CST_LOW (n->low));
4652 else
4653 warning ("case value `%ld' not in enumerated type `%s'",
4654 (long) TREE_INT_CST_LOW (n->low),
4655 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4656 == IDENTIFIER_NODE)
4657 ? TYPE_NAME (type)
4658 : DECL_NAME (TYPE_NAME (type))));
4660 if (!tree_int_cst_equal (n->low, n->high))
4662 for (chain = TYPE_VALUES (type);
4663 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4664 chain = TREE_CHAIN (chain))
4667 if (!chain)
4669 if (TYPE_NAME (type) == 0)
4670 warning ("case value `%ld' not in enumerated type",
4671 (long) TREE_INT_CST_LOW (n->high));
4672 else
4673 warning ("case value `%ld' not in enumerated type `%s'",
4674 (long) TREE_INT_CST_LOW (n->high),
4675 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4676 == IDENTIFIER_NODE)
4677 ? TYPE_NAME (type)
4678 : DECL_NAME (TYPE_NAME (type))));
4683 #if 0
4684 /* ??? This optimization is disabled because it causes valid programs to
4685 fail. ANSI C does not guarantee that an expression with enum type
4686 will have a value that is the same as one of the enumeration literals. */
4688 /* If all values were found as case labels, make one of them the default
4689 label. Thus, this switch will never fall through. We arbitrarily pick
4690 the last one to make the default since this is likely the most
4691 efficient choice. */
4693 if (all_values)
4695 for (l = &case_stack->data.case_stmt.case_list;
4696 (*l)->right != 0;
4697 l = &(*l)->right)
4700 case_stack->data.case_stmt.default_label = (*l)->code_label;
4701 *l = 0;
4703 #endif /* 0 */
4707 /* Terminate a case (Pascal) or switch (C) statement
4708 in which ORIG_INDEX is the expression to be tested.
4709 Generate the code to test it and jump to the right place. */
4711 void
4712 expand_end_case (orig_index)
4713 tree orig_index;
4715 tree minval, maxval, range, orig_minval;
4716 rtx default_label = 0;
4717 register struct case_node *n;
4718 unsigned int count;
4719 rtx index;
4720 rtx table_label;
4721 int ncases;
4722 rtx *labelvec;
4723 register int i;
4724 rtx before_case;
4725 register struct nesting *thiscase = case_stack;
4726 tree index_expr, index_type;
4727 int unsignedp;
4729 table_label = gen_label_rtx ();
4730 index_expr = thiscase->data.case_stmt.index_expr;
4731 index_type = TREE_TYPE (index_expr);
4732 unsignedp = TREE_UNSIGNED (index_type);
4734 do_pending_stack_adjust ();
4736 /* This might get a spurious warning in the presence of a syntax error;
4737 it could be fixed by moving the call to check_seenlabel after the
4738 check for error_mark_node, and copying the code of check_seenlabel that
4739 deals with case_stack->data.case_stmt.line_number_status /
4740 restore_line_number_status in front of the call to end_cleanup_deferral.
4741 However, this might miss some useful warnings in the presence of
4742 non-syntax errors. */
4743 check_seenlabel ();
4745 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4746 if (index_type != error_mark_node)
4748 /* If switch expression was an enumerated type, check that all
4749 enumeration literals are covered by the cases.
4750 No sense trying this if there's a default case, however. */
4752 if (!thiscase->data.case_stmt.default_label
4753 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4754 && TREE_CODE (index_expr) != INTEGER_CST)
4755 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4757 /* If we don't have a default-label, create one here,
4758 after the body of the switch. */
4759 if (thiscase->data.case_stmt.default_label == 0)
4761 thiscase->data.case_stmt.default_label
4762 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4763 expand_label (thiscase->data.case_stmt.default_label);
4765 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4767 before_case = get_last_insn ();
4769 if (thiscase->data.case_stmt.case_list
4770 && thiscase->data.case_stmt.case_list->left)
4771 thiscase->data.case_stmt.case_list
4772 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
4774 /* Simplify the case-list before we count it. */
4775 group_case_nodes (thiscase->data.case_stmt.case_list);
4777 /* Get upper and lower bounds of case values.
4778 Also convert all the case values to the index expr's data type. */
4780 count = 0;
4781 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4783 /* Check low and high label values are integers. */
4784 if (TREE_CODE (n->low) != INTEGER_CST)
4785 abort ();
4786 if (TREE_CODE (n->high) != INTEGER_CST)
4787 abort ();
4789 n->low = convert (index_type, n->low);
4790 n->high = convert (index_type, n->high);
4792 /* Count the elements and track the largest and smallest
4793 of them (treating them as signed even if they are not). */
4794 if (count++ == 0)
4796 minval = n->low;
4797 maxval = n->high;
4799 else
4801 if (INT_CST_LT (n->low, minval))
4802 minval = n->low;
4803 if (INT_CST_LT (maxval, n->high))
4804 maxval = n->high;
4806 /* A range counts double, since it requires two compares. */
4807 if (! tree_int_cst_equal (n->low, n->high))
4808 count++;
4811 orig_minval = minval;
4813 /* Compute span of values. */
4814 if (count != 0)
4815 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
4817 end_cleanup_deferral ();
4819 if (count == 0)
4821 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
4822 emit_queue ();
4823 emit_jump (default_label);
4826 /* If the range of values is much bigger than the number of values,
4827 make a sequence of conditional branches instead of a dispatch table.
4828 If the switch-index is a constant, do it this way
4829 because we can optimize it. */
4831 #ifndef CASE_VALUES_THRESHOLD
4832 #ifdef HAVE_casesi
4833 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
4834 #else
4835 /* If the machine does not have a case insn that compares the
4836 bounds, this means extra overhead for dispatch tables,
4837 which raises the threshold for using them. */
4838 #define CASE_VALUES_THRESHOLD 5
4839 #endif /* HAVE_casesi */
4840 #endif /* CASE_VALUES_THRESHOLD */
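/* For example, the case values 1, 5 and 1000 give count == 3 and a
   range of 999; since 999 > 10 * 3, the test below chooses the chain
   of conditional branches over a 1000-entry dispatch table.  */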
4842 else if (TREE_INT_CST_HIGH (range) != 0
4843 || count < CASE_VALUES_THRESHOLD
4844 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
4845 > 10 * count)
4846 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
4847 || flag_pic
4848 #endif
4849 || TREE_CODE (index_expr) == INTEGER_CST
4850 /* These will reduce to a constant. */
4851 || (TREE_CODE (index_expr) == CALL_EXPR
4852 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
4853 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
4854 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
4855 || (TREE_CODE (index_expr) == COMPOUND_EXPR
4856 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
4858 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4860 /* If the index is a short or char for which we do not have
4861 an insn to handle comparisons directly, convert it to
4862 a full integer now, rather than letting each comparison
4863 generate the conversion. */
4865 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
4866 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
4867 == CODE_FOR_nothing))
4869 enum machine_mode wider_mode;
4870 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
4871 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4872 if (cmp_optab->handlers[(int) wider_mode].insn_code
4873 != CODE_FOR_nothing)
4875 index = convert_to_mode (wider_mode, index, unsignedp);
4876 break;
4880 emit_queue ();
4881 do_pending_stack_adjust ();
4883 index = protect_from_queue (index, 0);
4884 if (GET_CODE (index) == MEM)
4885 index = copy_to_reg (index);
4886 if (GET_CODE (index) == CONST_INT
4887 || TREE_CODE (index_expr) == INTEGER_CST)
4889 /* Make a tree node with the proper constant value
4890 if we don't already have one. */
4891 if (TREE_CODE (index_expr) != INTEGER_CST)
4893 index_expr
4894 = build_int_2 (INTVAL (index),
4895 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
4896 index_expr = convert (index_type, index_expr);
4899 /* For constant index expressions we need only
4900 issue an unconditional branch to the appropriate
4901 target code. The job of removing any unreachable
4902 code is left to the optimization phase if the
4903 "-O" option is specified. */
4904 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4905 if (! tree_int_cst_lt (index_expr, n->low)
4906 && ! tree_int_cst_lt (n->high, index_expr))
4907 break;
4909 if (n)
4910 emit_jump (label_rtx (n->code_label));
4911 else
4912 emit_jump (default_label);
4914 else
4916 /* If the index expression is not constant we generate
4917 a binary decision tree to select the appropriate
4918 target code. This is done as follows:
4920 The list of cases is rearranged into a binary tree,
4921 nearly optimal assuming equal probability for each case.
4923 The tree is transformed into RTL, eliminating
4924 redundant test conditions at the same time.
4926 If program flow could reach the end of the
4927 decision tree an unconditional jump to the
4928 default code is emitted. */
4930 use_cost_table
4931 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
4932 && estimate_case_costs (thiscase->data.case_stmt.case_list));
4933 balance_case_nodes (&thiscase->data.case_stmt.case_list,
4934 NULL_PTR);
4935 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
4936 default_label, index_type);
4937 emit_jump_if_reachable (default_label);
4940 else
4942 int win = 0;
4943 #ifdef HAVE_casesi
4944 if (HAVE_casesi)
4946 enum machine_mode index_mode = SImode;
4947 int index_bits = GET_MODE_BITSIZE (index_mode);
4948 rtx op1, op2;
4949 enum machine_mode op_mode;
4951 /* Convert the index to SImode. */
4952 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
4953 > GET_MODE_BITSIZE (index_mode))
4955 enum machine_mode omode = TYPE_MODE (index_type);
4956 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
4958 /* We must handle the endpoints in the original mode. */
4959 index_expr = build (MINUS_EXPR, index_type,
4960 index_expr, minval);
4961 minval = integer_zero_node;
4962 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4963 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
4964 emit_jump_insn (gen_bltu (default_label));
4965 /* Now we can safely truncate. */
4966 index = convert_to_mode (index_mode, index, 0);
4968 else
4970 if (TYPE_MODE (index_type) != index_mode)
4972 index_expr = convert (type_for_size (index_bits, 0),
4973 index_expr);
4974 index_type = TREE_TYPE (index_expr);
4977 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4979 emit_queue ();
4980 index = protect_from_queue (index, 0);
4981 do_pending_stack_adjust ();
4983 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
4984 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
4985 (index, op_mode))
4986 index = copy_to_mode_reg (op_mode, index);
4988 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
4990 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
4991 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
4992 (op1, op_mode))
4993 op1 = copy_to_mode_reg (op_mode, op1);
4995 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
4997 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
4998 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
4999 (op2, op_mode))
5000 op2 = copy_to_mode_reg (op_mode, op2);
5002 emit_jump_insn (gen_casesi (index, op1, op2,
5003 table_label, default_label));
5004 win = 1;
5006 #endif
5007 #ifdef HAVE_tablejump
5008 if (! win && HAVE_tablejump)
5010 index_expr = convert (thiscase->data.case_stmt.nominal_type,
5011 fold (build (MINUS_EXPR, index_type,
5012 index_expr, minval)));
5013 index_type = TREE_TYPE (index_expr);
5014 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5015 emit_queue ();
5016 index = protect_from_queue (index, 0);
5017 do_pending_stack_adjust ();
5019 do_tablejump (index, TYPE_MODE (index_type),
5020 expand_expr (range, NULL_RTX, VOIDmode, 0),
5021 table_label, default_label);
5022 win = 1;
5024 #endif
5025 if (! win)
5026 abort ();
5028 /* Get table of labels to jump to, in order of case index. */
5030 ncases = TREE_INT_CST_LOW (range) + 1;
5031 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5032 bzero ((char *) labelvec, ncases * sizeof (rtx));
5034 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5036 register HOST_WIDE_INT i
5037 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5039 while (1)
5041 labelvec[i]
5042 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5043 if (i + TREE_INT_CST_LOW (orig_minval)
5044 == TREE_INT_CST_LOW (n->high))
5045 break;
5046 i++;
5050 /* Fill in the gaps with the default. */
5051 for (i = 0; i < ncases; i++)
5052 if (labelvec[i] == 0)
5053 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5055 /* Output the table */
5056 emit_label (table_label);
5058 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5059 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5060 gen_rtx_LABEL_REF (Pmode, table_label),
5061 gen_rtvec_v (ncases, labelvec),
5062 const0_rtx, const0_rtx, 0));
5063 else
5064 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5065 gen_rtvec_v (ncases, labelvec)));
5067 /* If the case insn drops through the table,
5068 after the table we must jump to the default-label.
5069 Otherwise record no drop-through after the table. */
5070 #ifdef CASE_DROPS_THROUGH
5071 emit_jump (default_label);
5072 #else
5073 emit_barrier ();
5074 #endif
5077 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5078 reorder_insns (before_case, get_last_insn (),
5079 thiscase->data.case_stmt.start);
5081 else
5082 end_cleanup_deferral ();
5084 if (thiscase->exit_label)
5085 emit_label (thiscase->exit_label);
5087 POPSTACK (case_stack);
5089 free_temp_slots ();
5092 /* Convert the tree NODE into a list linked by the right field, with the left
5093 field zeroed. RIGHT is used for recursion; it is a list to be placed
5094 rightmost in the resulting list. */
5096 static struct case_node *
5097 case_tree2list (node, right)
5098 struct case_node *node, *right;
5100 struct case_node *left;
5102 if (node->right)
5103 right = case_tree2list (node->right, right);
5105 node->right = right;
5106 if ((left = node->left))
5108 node->left = 0;
5109 return case_tree2list (left, node);
5112 return node;
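/* For example, the balanced tree

         2
        / \
       1   3

   flattens to the list 1 -> 2 -> 3, linked by `right' with each
   `left' zeroed, which is the ordered form expand_end_case works
   with.  */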
5115 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5117 static void
5118 do_jump_if_equal (op1, op2, label, unsignedp)
5119 rtx op1, op2, label;
5120 int unsignedp;
5122 if (GET_CODE (op1) == CONST_INT
5123 && GET_CODE (op2) == CONST_INT)
5125 if (INTVAL (op1) == INTVAL (op2))
5126 emit_jump (label);
5128 else
5130 enum machine_mode mode = GET_MODE (op1);
5131 if (mode == VOIDmode)
5132 mode = GET_MODE (op2);
5133 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
5134 emit_jump_insn (gen_beq (label));
5138 /* Not all case values are encountered equally. This function
5139 uses a heuristic to weight case labels, in cases where that
5140 looks like a reasonable thing to do.
5142 Right now, all we try to guess is text, and we establish the
5143 following weights:
5145 chars above space: 16
5146 digits: 16
5147 default: 12
5148 space, punct: 8
5149 tab: 4
5150 newline: 2
5151 other "\" chars: 1
5152 remaining chars: 0
5154 If we find any cases in the switch that are not either -1 or in the range
5155 of valid ASCII characters, or are control characters other than those
5156 commonly used with "\", don't treat this switch as scanning text.
5158 Return 1 if these nodes are suitable for cost estimation, otherwise
5159 return 0. */
5161 static int
5162 estimate_case_costs (node)
5163 case_node_ptr node;
5165 tree min_ascii = build_int_2 (-1, -1);
5166 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5167 case_node_ptr n;
5168 int i;
5170 /* If we haven't already made the cost table, make it now. Note that the
5171 lower bound of the table is -1, not zero. */
5173 if (cost_table == NULL)
5175 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5176 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5178 for (i = 0; i < 128; i++)
5180 if (ISALNUM (i))
5181 cost_table[i] = 16;
5182 else if (ISPUNCT (i))
5183 cost_table[i] = 8;
5184 else if (ISCNTRL (i))
5185 cost_table[i] = -1;
5188 cost_table[' '] = 8;
5189 cost_table['\t'] = 4;
5190 cost_table['\0'] = 4;
5191 cost_table['\n'] = 2;
5192 cost_table['\f'] = 1;
5193 cost_table['\v'] = 1;
5194 cost_table['\b'] = 1;
5197 /* See if all the case expressions look like text. It is text if the
5198 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5199 as signed arithmetic since we don't want to ever access cost_table with a
5200 value less than -1. Also check that none of the constants in a range
5201 are strange control characters. */
5203 for (n = node; n; n = n->right)
5205 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5206 return 0;
5208 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5209 if (cost_table[i] < 0)
5210 return 0;
5213 /* All interesting values are within the range of interesting
5214 ASCII characters. */
5215 return 1;
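/* For example, a switch whose cases are 'a'..'z', '0'..'9' and '\n'
   passes both tests above, so the cost table is used; one containing
   the case value 200 (outside -1..127) makes this function return 0
   and the caller fall back to assuming uniform case probabilities.  */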
5218 /* Scan an ordered list of case nodes
5219 combining those with consecutive values or ranges.
5221 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
5223 static void
5224 group_case_nodes (head)
5225 case_node_ptr head;
5227 case_node_ptr node = head;
5229 while (node)
5231 rtx lb = next_real_insn (label_rtx (node->code_label));
5232 rtx lb2;
5233 case_node_ptr np = node;
5235 /* Try to group the successors of NODE with NODE. */
5236 while (((np = np->right) != 0)
5237 /* Do they jump to the same place? */
5238 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5239 || (lb != 0 && lb2 != 0
5240 && simplejump_p (lb)
5241 && simplejump_p (lb2)
5242 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5243 SET_SRC (PATTERN (lb2)))))
5244 /* Are their ranges consecutive? */
5245 && tree_int_cst_equal (np->low,
5246 fold (build (PLUS_EXPR,
5247 TREE_TYPE (node->high),
5248 node->high,
5249 integer_one_node)))
5250 /* An overflow is not consecutive. */
5251 && tree_int_cst_lt (node->high,
5252 fold (build (PLUS_EXPR,
5253 TREE_TYPE (node->high),
5254 node->high,
5255 integer_one_node))))
5257 node->high = np->high;
5259 /* NP is the first node after NODE which can't be grouped with it.
5260 Delete the nodes in between, and move on to that node. */
5261 node->right = np;
5262 node = np;
5266 /* Take an ordered list of case nodes
5267 and transform them into a near optimal binary tree,
5268 on the assumption that any target code selection value is as
5269 likely as any other.
5271 The transformation is performed by splitting the ordered
5272 list into two equal sections plus a pivot. The parts are
5273 then attached to the pivot as left and right branches. Each
5274 branch is then transformed recursively. */
5276 static void
5277 balance_case_nodes (head, parent)
5278 case_node_ptr *head;
5279 case_node_ptr parent;
5281 register case_node_ptr np;
5283 np = *head;
5284 if (np)
5286 int cost = 0;
5287 int i = 0;
5288 int ranges = 0;
5289 register case_node_ptr *npp;
5290 case_node_ptr left;
5292 /* Count the number of entries on branch. Also count the ranges. */
5294 while (np)
5296 if (!tree_int_cst_equal (np->low, np->high))
5298 ranges++;
5299 if (use_cost_table)
5300 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5303 if (use_cost_table)
5304 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5306 i++;
5307 np = np->right;
5310 if (i > 2)
5312 /* Split this list if it is long enough for that to help. */
5313 npp = head;
5314 left = *npp;
5315 if (use_cost_table)
5317 /* Find the place in the list that bisects the list's total cost.
5318 Here I gets half the total cost. */
5319 int n_moved = 0;
5320 i = (cost + 1) / 2;
5321 while (1)
5323 /* Skip nodes while their cost does not reach that amount. */
5324 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5325 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5326 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5327 if (i <= 0)
5328 break;
5329 npp = &(*npp)->right;
5330 n_moved += 1;
5332 if (n_moved == 0)
5334 /* Leave this branch lopsided, but optimize left-hand
5335 side and fill in `parent' fields for right-hand side. */
5336 np = *head;
5337 np->parent = parent;
5338 balance_case_nodes (&np->left, np);
5339 for (; np->right; np = np->right)
5340 np->right->parent = np;
5341 return;
5344 /* If there are just three nodes, split at the middle one. */
5345 else if (i == 3)
5346 npp = &(*npp)->right;
5347 else
5349 /* Find the place in the list that bisects the list's total cost,
5350 where ranges count as 2.
5351 Here I gets half the total cost. */
5352 i = (i + ranges + 1) / 2;
5353 while (1)
5355 /* Skip nodes while their cost does not reach that amount. */
5356 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5357 i--;
5358 i--;
5359 if (i <= 0)
5360 break;
5361 npp = &(*npp)->right;
5364 *head = np = *npp;
5365 *npp = 0;
5366 np->parent = parent;
5367 np->left = left;
5369 /* Optimize each of the two split parts. */
5370 balance_case_nodes (&np->left, np);
5371 balance_case_nodes (&np->right, np);
5373 else
5375 /* Else leave this branch as one level,
5376 but fill in `parent' fields. */
5377 np = *head;
5378 np->parent = parent;
5379 for (; np->right; np = np->right)
5380 np->right->parent = np;
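/* Worked example of the split above, without the cost table: five
   single-value nodes give i == 5 and ranges == 0, so i becomes
   (5 + 0 + 1) / 2 == 3; the loop steps past two nodes and stops on
   the third, which becomes the pivot, leaving two nodes on each side
   to be balanced recursively.  */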
5385 /* Search the parent sections of the case node tree
5386 to see if a test for the lower bound of NODE would be redundant.
5387 INDEX_TYPE is the type of the index expression.
5389 The instructions to generate the case decision tree are
5390 output in the same order as nodes are processed, so it is
5391 known that if a parent node checks the range of the current
5392 node minus one, the current node is bounded at its lower
5393 span. Thus the test would be redundant. */
5395 static int
5396 node_has_low_bound (node, index_type)
5397 case_node_ptr node;
5398 tree index_type;
5400 tree low_minus_one;
5401 case_node_ptr pnode;
5403 /* If the lower bound of this node is the lowest value in the index type,
5404 we need not test it. */
5406 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5407 return 1;
5409 /* If this node has a left branch, the value at the left must be less
5410 than that at this node, so it cannot be bounded at the bottom and
5411 we need not bother testing any further. */
5413 if (node->left)
5414 return 0;
5416 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5417 node->low, integer_one_node));
5419 /* If the subtraction above overflowed, we can't verify anything.
5420 Otherwise, look for a parent that tests our value - 1. */
5422 if (! tree_int_cst_lt (low_minus_one, node->low))
5423 return 0;
5425 for (pnode = node->parent; pnode; pnode = pnode->parent)
5426 if (tree_int_cst_equal (low_minus_one, pnode->high))
5427 return 1;
5429 return 0;
5432 /* Search the parent sections of the case node tree
5433 to see if a test for the upper bound of NODE would be redundant.
5434 INDEX_TYPE is the type of the index expression.
5436 The instructions to generate the case decision tree are
5437 output in the same order as nodes are processed, so it is
5438 known that if a parent node checks the range of the current
5439 node plus one, the current node is bounded at its upper
5440 span. Thus the test would be redundant. */
5442 static int
5443 node_has_high_bound (node, index_type)
5444 case_node_ptr node;
5445 tree index_type;
5447 tree high_plus_one;
5448 case_node_ptr pnode;
5450 /* If there is no upper bound, obviously no test is needed. */
5452 if (TYPE_MAX_VALUE (index_type) == NULL)
5453 return 1;
5455 /* If the upper bound of this node is the highest value in the type
5456 of the index expression, we need not test against it. */
5458 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5459 return 1;
5461 /* If this node has a right branch, the value at the right must be greater
5462 than that at this node, so it cannot be bounded at the top and
5463 we need not bother testing any further. */
5465 if (node->right)
5466 return 0;
5468 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5469 node->high, integer_one_node));
5471 /* If the addition above overflowed, we can't verify anything.
5472 Otherwise, look for a parent that tests our value + 1. */
5474 if (! tree_int_cst_lt (node->high, high_plus_one))
5475 return 0;
5477 for (pnode = node->parent; pnode; pnode = pnode->parent)
5478 if (tree_int_cst_equal (high_plus_one, pnode->low))
5479 return 1;
5481 return 0;
5484 /* Search the parent sections of the
5485 case node tree to see if both tests for the upper and lower
5486 bounds of NODE would be redundant. */
5488 static int
5489 node_is_bounded (node, index_type)
5490 case_node_ptr node;
5491 tree index_type;
5493 return (node_has_low_bound (node, index_type)
5494 && node_has_high_bound (node, index_type));
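/* For example, if a parent node already branched away all values
   greater than 50 and NODE covers 51..60, then node_has_low_bound
   finds the parent whose `high' is 50 (== 51 - 1) and reports the
   lower-bound test redundant.  */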
5497 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5499 static void
5500 emit_jump_if_reachable (label)
5501 rtx label;
5503 if (GET_CODE (get_last_insn ()) != BARRIER)
5504 emit_jump (label);
5507 /* Emit step-by-step code to select a case for the value of INDEX.
5508 The decision tree thus generated follows the form of the
5509 case-node binary tree NODE, whose nodes represent test conditions.
5510 INDEX_TYPE is the type of the index of the switch.
5512 Care is taken to prune redundant tests from the decision tree
5513 by detecting any boundary conditions already checked by
5514 emitted rtx. (See node_has_high_bound, node_has_low_bound
5515 and node_is_bounded, above.)
5517 Where the test conditions can be shown to be redundant we emit
5518 an unconditional jump to the target code. As a further
5519 optimization, the subordinates of a tree node are examined to
5520 check for bounded nodes. In this case conditional and/or
5521 unconditional jumps as a result of the boundary check for the
5522 current node are arranged to target the subordinates associated
5523 code for out of bound conditions on the current node.
5525 We can assume that when control reaches the code generated here,
5526 the index value has already been compared with the parents
5527 of this node, and determined to be on the same side of each parent
5528 as this node is. Thus, if this node tests for the value 51,
5529 and a parent tested for 52, we don't need to consider
5530 the possibility of a value greater than 51. If another parent
5531 tests for the value 50, then this node need not test anything. */
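
/* For illustration (a sketch, not emitted verbatim): a three-case switch
   on I with cases 1, 2 and 3, balanced with 2 at the root, typically
   expands along the lines of

        if (I == 2) goto L2;
        if (I > 2) goto T;
        if (I == 1) goto L1;
        goto DEFAULT;
     T: if (I == 3) goto L3;

   where the bound checks for the leaves are pruned whenever a parent's
   comparison or the index type's range already establishes them.  */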

static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_fn ();
  rtx_fn *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
  rtx_fn *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
  rtx_fn *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
  rtx_fn *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
  enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
         this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                        label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
        {
          /* This node has children on both sides.
             Dispatch to one side or the other
             by comparing the index value with this node's value.
             If one subtree is bounded, check that one first,
             so we can avoid real branches in the tree.  */

          if (node_is_bounded (node->right, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);

              emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
              emit_case_nodes (index, node->left, default_label, index_type);
            }

          else if (node_is_bounded (node->left, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
              emit_case_nodes (index, node->right, default_label, index_type);
            }

          else
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              tree test_label
                = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

              /* See if the value is on the right.  */
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));

              /* Value must be on the left.
                 Handle the left-hand subtree.  */
              emit_case_nodes (index, node->left, default_label, index_type);
              /* If left-hand subtree does nothing,
                 go to default.  */
              emit_jump_if_reachable (default_label);

              /* Code branches here for the right-hand subtree.  */
              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Here we have a right child but no left, so we issue a
             conditional branch to default and process the right child.

             Omit the conditional branch to default if it would avoid
             only one right child; it costs too much space to save so
             little time.  */

          if (node->right->right || node->right->left
              || !tree_int_cst_equal (node->right->low, node->right->high))
            {
              if (!node_has_low_bound (node, index_type))
                {
                  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                 LT, NULL_RTX, mode, unsignedp, 0);
                  emit_jump_insn ((*gen_blt_pat) (default_label));
                }

              emit_case_nodes (index, node->right, default_label, index_type);
            }
          else
            /* We cannot process node->right normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->right explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->right->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->right->code_label), unsignedp);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
         of the condition here, but they didn't work
         and I don't understand what the idea was.  -- rms.  */
          /* If our "most probable entry" is less probable
             than the default label, emit a jump to
             the default label using condition codes
             already lying around.  With no right branch,
             a branch-greater-than will get us to the default
             label correctly.  */
          if (use_cost_table
              && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
            ;
#endif /* 0 */
          if (node->left->left || node->left->right
              || !tree_int_cst_equal (node->left->low, node->left->high))
            {
              if (!node_has_high_bound (node, index_type))
                {
                  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                 GT, NULL_RTX, mode, unsignedp, 0);
                  emit_jump_insn ((*gen_bgt_pat) (default_label));
                }

              emit_case_nodes (index, node->left, default_label, index_type);
            }
          else
            /* We cannot process node->left normally
               since we haven't ruled out the numbers greater than
               this node's value.  So handle node->left explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->left->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->left->code_label), unsignedp);
        }
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
         value, except that we do not start by testing whether this node
         is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
        {
          /* Node has subtrees on both sides.
             If the right-hand subtree is bounded,
             test for it first, since we can go straight there.
             Otherwise, we need to make a branch in the control structure,
             then handle the two subtrees.  */
          tree test_label = 0;

          emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                             VOIDmode, 0),
                         GT, NULL_RTX, mode, unsignedp, 0);

          if (node_is_bounded (node->right, index_type))
            /* Right hand node is fully bounded so we can eliminate any
               testing and branch directly to the target code.  */
            emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
          else
            {
              /* Right hand node requires testing.
                 Branch to a label where we will handle it later.  */

              test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
              emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                         GE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

          /* Handle the left-hand subtree.  */
          emit_case_nodes (index, node->left, default_label, index_type);

          /* If right node had to be handled later, do that now.  */

          if (test_label)
            {
              /* If the left-hand subtree fell through,
                 don't let it fall into the right-hand subtree.  */
              emit_jump_if_reachable (default_label);

              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Deal with values to the left of this node,
             if they are possible.  */
          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (default_label));
            }

          /* Value belongs to this node or to the right-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                             VOIDmode, 0),
                         LE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));

          emit_case_nodes (index, node->right, default_label, index_type);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Deal with values to the right of this node,
             if they are possible.  */
          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (default_label));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                         GE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

          emit_case_nodes (index, node->left, default_label, index_type);
        }

      else
        {
          /* Node has no children so we check low and high bounds to remove
             redundant tests.  Only one of the bounds can exist,
             since otherwise this node is bounded--a case tested already.  */

          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (default_label));
            }

          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (default_label));
            }

          emit_jump (label_rtx (node->code_label));
        }
    }
}

/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.

   Allocated by the call to identify_blocks, then released after the call
   to reorder_blocks in the function unroll_block_trees.  */

static tree *block_vector;

void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());

  /* Release any memory allocated by identify_blocks.  */
  if (block_vector)
    free (block_vector);
}
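
/* Illustrative calling sequence (a sketch; the real caller is the loop
   unroller, whose internals are not shown here):

        find_loop_tree_blocks ();    -- snapshot the BLOCK tree
        ... copy the loop body, duplicating its block notes ...
        unroll_block_trees ();       -- rebuild the trees, free the vector
*/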