Call fatal_insn_not_found instead of abort
gcc/stmt.c
/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
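/* Editorial sketch (not part of the original source): for a C statement
   `if (cond) stmt1; else stmt2;' the expected call sequence from a front
   end, interleaved with expanding the two statement bodies, would be:

	expand_start_cond (cond, 0);
	... expand stmt1 ...
	expand_start_else ();
	... expand stmt2 ...
	expand_end_cond ();

   All four functions are defined later in this file.  */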
#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;
/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;
/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars whose address is an offset from the argument pointer
   will be needed by inner routines.  */
extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
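/* Editorial example: for a GNU C switch such as

	switch (i) { case 1: ...  case 4 ... 6: ...  default: ... }

   `case 1' becomes a case_node with low == high == 1, and the range
   `case 4 ... 6' a node with low == 4 and high == 6.  The default label
   is not entered in the tree; it is kept separately in the case_stmt
   nesting data declared below.  */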
struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
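/* Editorial illustration of the two chains: while expanding

	while (a)	(pushes a `loop' nesting object L)
	  if (b)	(pushes a `cond' nesting object C)
	    { ... }	(pushes a `block' nesting object B)

   nesting_stack runs B -> C -> L through the `all' fields, whereas
   loop_stack holds only L, cond_stack only C, and block_stack only B,
   each chained through `next'.  */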
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block; if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups; must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	  /* When in a conditional context, this is the specific
	     cleanup list associated with last_unconditional_cleanup,
	     where we place the conditionalized cleanups.  */
	  tree *cleanup_ptr;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} case_stmt;
    } data;
};
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
   while (this != target); } while (0)
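/* Editorial note: a typical use is `POPSTACK (cond_stack);' in
   expand_end_cond below, which unwinds nesting_stack down to and
   including the innermost cond, keeping every per-type stack
   consistent along the way.  */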
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
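/* Editorial example of when a fixup is needed: in

	{ char buf[n]; ... goto out; ... }
	out: ;

   the inner contour has a stack level (buf has variable size) and the
   label is not yet defined when the goto is expanded, so expand_fixup
   queues a goto_fixup; once `out' is defined, fixup_gotos inserts the
   stack-restore code just before the jump.  */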
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to the following label_chain.  */
  struct label_chain *next;
  tree label;
};
/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;
static void expand_goto_internal	PROTO((tree, rtx, rtx));
static int expand_fixup			PROTO((tree, rtx, rtx));
static void fixup_gotos			PROTO((struct nesting *, rtx, tree,
					       rtx, int));
static void expand_null_return_1	PROTO((rtx, int));
static void expand_value_return		PROTO((rtx));
static int tail_recursion_args		PROTO((tree, tree));
static void expand_cleanups		PROTO((tree, tree, int, int));
static void check_seenlabel		PROTO((void));
static void do_jump_if_equal		PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs		PROTO((case_node_ptr));
static void group_case_nodes		PROTO((case_node_ptr));
static void balance_case_nodes		PROTO((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PROTO((case_node_ptr, tree));
static int node_has_high_bound		PROTO((case_node_ptr, tree));
static int node_is_bounded		PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable	PROTO((rtx));
static void emit_case_nodes		PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node		PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list PROTO((case_node *, case_node *));
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);

  init_eh ();
}
void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;

  init_eh_for_function ();
}
void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
  save_eh_status (p);
}
void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
  restore_eh_status (p);
}
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (flag_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, 1,
		       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with hard_frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx, label_ref);
	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a CODE_LABEL rtx.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */
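/* Editorial note: a `break' in `while (c) { if (d) break; }' targets the
   loop's end_label, so the test below finds END_BLOCK on loop_stack
   directly; if no enclosed binding contour has a stack level or
   cleanups, no fixup is created, which is the normal case for C.  */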
static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	start_sequence ();
	pushlevel (0);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
	end_sequence ();
	emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
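/* Editorial note: this is reached both from expand_fixups above, with
   THISBLOCK == 0 for the outermost level, and when individual binding
   contours are closed.  The first loop below finalizes fixups whose
   target labels are now defined; the second updates the still-pending
   fixups with this contour's cleanups and stack level.  */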
static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point ensures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */
void
expand_asm (body)
     tree body;
{
  if (flag_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
				TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
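/* Editorial example: for plain GNU C `asm ("nop");' the front end passes
   the STRING_CST "nop" (possibly wrapped in an ADDR_EXPR), and the
   function above emits a bare ASM_INPUT rtx carrying that text.  */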
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
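/* Editorial example of the parameter layout: the GNU C statement

	asm ("add %2,%0" : "=r" (x) : "0" (x), "g" (y) : "cc");

   arrives with STRING == "add %2,%0"; OUTPUTS is a one-element list with
   TREE_PURPOSE "=r" and TREE_VALUE the lvalue x; INPUTS holds ("0", x)
   and ("g", y); CLOBBERS holds the STRING_CST "cc".  */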
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (flag_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      int j;
      int found_equal = 0;
      int found_plus = 0;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
	 if it allows any register.  Be liberal on the latter test, since
	 the worst that happens if we get it wrong is we issue an error
	 message.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
	switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
	  {
	  case '+':
	    /* Make sure we can specify the matching operand.  */
	    if (i > 9)
	      {
		error ("output operand constraint %d contains `+'", i);
		return;
	      }

	    /* Replace '+' with '='.  */
	    TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] = '=';
	    found_plus = 1;
	    break;

	  case '=':
	    found_equal = 1;
	    break;

	  case '?':  case '!':  case '*':  case '%':  case '&':
	  case 'V':  case 'm':  case 'o':  case '<':  case '>':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    error ("matching constraint not valid in output operand");
	    break;

	  case 'p':  case 'g':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      if (! found_equal && ! found_plus)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      if (TREE_CODE (val) == INDIRECT_REF
	  || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || found_plus)
	{
	  if (! allows_reg)
	    mark_addressable (TREE_VALUE (tail));

	  output_rtx[i]
	    = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
			   EXPAND_MEMORY_USE_WO);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);
	}
      else
	{
	  output_rtx[i] = assign_temp (type, 0, 0, 0);
	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
	}

      if (found_plus)
	{
	  inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
	  inout_opnum[ninout++] = i;
	}
    }
  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS (VOIDmode,
			       TREE_STRING_POINTER (string), "", 0, argvec,
			       constraints, filename, line);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)) );
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
	switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
	  {
	  case '+':  case '=':
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;

	  case '?':  case '!':  case '*':  case '%':  case '&':
	  case 'V':  case 'm':  case 'o':  case '<':  case '>':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	    /* Whether or not a numeric constraint allows a register is
	       decided by the matching constraint, and so there is no need
	       to do anything special with them.  We must handle them in
	       the default case, so that we don't unnecessarily force
	       operands to memory.  */
	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]
		>= '0' + noutputs)
	      {
		error
		  ("matching constraint references invalid operand number");
		return;
	      }

	    /* ... fall through ...  */

	  case 'p':  case 'g':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      if (! allows_reg)
	mark_addressable (TREE_VALUE (tail));

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      if (CONSTANT_P (XVECEXP (body, 3, i))
	  && ! general_operand (XVECEXP (body, 3, i),
				TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)))))
	{
	  if (allows_reg)
	    XVECEXP (body, 3, i)
	      = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			   XVECEXP (body, 3, i));
	  else
	    XVECEXP (body, 3, i)
	      = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
				 XVECEXP (body, 3, i));
	}

      if (! allows_reg
	  && (GET_CODE (XVECEXP (body, 3, i)) == REG
	      || GET_CODE (XVECEXP (body, 3, i)) == SUBREG
	      || GET_CODE (XVECEXP (body, 3, i)) == CONCAT))
	{
	  tree type = TREE_TYPE (TREE_VALUE (tail));
	  rtx memloc = assign_temp (type, 1, 1, 1);

	  emit_move_insn (memloc, XVECEXP (body, 3, i));
	  XVECEXP (body, 3, i) = memloc;
	}

      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			     TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }
  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs - ninout; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      static char match[9+1][2]
	= {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
      int j = inout_opnum[i];

      XVECEXP (body, 3, ninputs - ninout + i)      /* argvec */
	= output_rtx[j];
      XVECEXP (body, 4, ninputs - ninout + i)      /* constraints */
	= gen_rtx_ASM_INPUT (inout_mode[j], match[j]);
    }
  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx_SET (VOIDmode,
			   output_rtx[i],
			   gen_rtx_ASM_OPERANDS (VOIDmode,
						 TREE_STRING_POINTER (string),
						 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
						 i, argvec, constraints,
						 filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx_CLOBBER (VOIDmode,
				       gen_rtx_MEM (BLKmode,
						    gen_rtx_SCRATCH (VOIDmode)));
		  continue;
		}

	      /* Ignore unknown register, error already signaled.  */
	      continue;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }

  /* If EXP is of function type and we are expanding statements for
     value, convert it to pointer-to-function.  */
  if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
    exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);

  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only || expr_stmts_for_value)
    last_expr_value = expand_expr (exp,
				   (expr_stmts_for_value
				    ? NULL_RTX : const0_rtx),
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
	;
      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      NULL_RTX, VOIDmode, 0),
			 BLKmode, 0,
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case SAVE_EXPR:
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
	return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  There can be an arbitrary number of
	 casts before the modify, so we must loop until we find the first
	 non-cast expression and then test to see if that is a modify.  */
      {
	tree tem = TREE_OPERAND (exp, 0);

	while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
	  tem = TREE_OPERAND (tem, 0);

	if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
	    || TREE_CODE (tem) == CALL_EXPR)
	  return 0;
      }
      goto warn;

    case INDIRECT_REF:
      /* Don't warn about automatic dereferencing of references, since
	 the user cannot control it.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
	return warn_if_unused_value (TREE_OPERAND (exp, 0));
      /* ... fall through ...  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
	  && TREE_THIS_VOLATILE (exp))
	return 0;
    warn:
      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
      return 1;
    }
}
/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}
/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */
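/* Editorial sketch of the protocol for a GNU C statement expression
   ({ stmts; val; }):

	tree t = expand_start_stmt_expr ();
	... expand each statement; the last one records its value ...
	t = expand_end_stmt_expr (t);

   after which T carries the type and RTL value of VAL.  */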
tree
expand_start_stmt_expr ()
{
  int momentary;
  tree t;

  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  momentary = suspend_momentary ();
  t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  do_pending_stack_adjust ();
  start_sequence_for_rtl_expr (t);
  NO_DEFER_POP;
  expr_stmts_for_value++;
  return t;
}
1705 /* Restore the previous state at the end of a statement that returns a value.
1706 Returns a tree node representing the statement's value and the
1707 insns to compute the value.
1709 The nodes of that expression have been freed by now, so we cannot use them.
1710 But we don't want to do that anyway; the expression has already been
1711 evaluated and now we just want to use the value. So generate a RTL_EXPR
1712 with the proper type and RTL value.
1714 If the last substatement was not an expression,
1715 return something with type `void'. */
1717 tree
1718 expand_end_stmt_expr (t)
1719 tree t;
1721 OK_DEFER_POP;
1723 if (last_expr_type == 0)
1725 last_expr_type = void_type_node;
1726 last_expr_value = const0_rtx;
1728 else if (last_expr_value == 0)
1729 /* There are some cases where this can happen, such as when the
1730 statement is void type. */
1731 last_expr_value = const0_rtx;
1732 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1733 /* Remove any possible QUEUED. */
1734 last_expr_value = protect_from_queue (last_expr_value, 0);
1736 emit_queue ();
1738 TREE_TYPE (t) = last_expr_type;
1739 RTL_EXPR_RTL (t) = last_expr_value;
1740 RTL_EXPR_SEQUENCE (t) = get_insns ();
1742 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1744 end_sequence ();
1746 /* Don't consider deleting this expr or containing exprs at tree level. */
1747 TREE_SIDE_EFFECTS (t) = 1;
1748 /* Propagate volatility of the actual RTL expr. */
1749 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1751 last_expr_type = 0;
1752 expr_stmts_for_value--;
1754 return t;
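/* Editorial sketch (not part of the original source): how a front end
   drives the two routines above for a GNU statement expression
   `({ stmt1; stmt2; val; })'.  The statement-expansion calls in the
   middle are stand-ins.  */
#if 0
tree rtl_exp = expand_start_stmt_expr ();	/* start collecting insns */
/* ...expand stmt1, stmt2 and `val'; the last expression statement
   records last_expr_type and last_expr_value... */
rtl_exp = expand_end_stmt_expr (rtl_exp);	/* RTL_EXPR with type and value */
#endif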
1757 /* Generate RTL for the start of an if-then. COND is the expression
1758 whose truth should be tested.
1760 If EXITFLAG is nonzero, this conditional is visible to
1761 `exit_something'. */
1763 void
1764 expand_start_cond (cond, exitflag)
1765 tree cond;
1766 int exitflag;
1768 struct nesting *thiscond = ALLOC_NESTING ();
1770 /* Make an entry on cond_stack for the cond we are entering. */
1772 thiscond->next = cond_stack;
1773 thiscond->all = nesting_stack;
1774 thiscond->depth = ++nesting_depth;
1775 thiscond->data.cond.next_label = gen_label_rtx ();
1776 /* Before we encounter an `else', we don't need a separate exit label
1777 unless there are supposed to be exit statements
1778 to exit this conditional. */
1779 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1780 thiscond->data.cond.endif_label = thiscond->exit_label;
1781 cond_stack = thiscond;
1782 nesting_stack = thiscond;
1784 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1787 /* Generate RTL between the then-clause and the elseif-clause
1788 of an if-then-elseif-.... */
1790 void
1791 expand_start_elseif (cond)
1792 tree cond;
1794 if (cond_stack->data.cond.endif_label == 0)
1795 cond_stack->data.cond.endif_label = gen_label_rtx ();
1796 emit_jump (cond_stack->data.cond.endif_label);
1797 emit_label (cond_stack->data.cond.next_label);
1798 cond_stack->data.cond.next_label = gen_label_rtx ();
1799 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1802 /* Generate RTL between the then-clause and the else-clause
1803 of an if-then-else. */
1805 void
1806 expand_start_else ()
1808 if (cond_stack->data.cond.endif_label == 0)
1809 cond_stack->data.cond.endif_label = gen_label_rtx ();
1811 emit_jump (cond_stack->data.cond.endif_label);
1812 emit_label (cond_stack->data.cond.next_label);
1813 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1816 /* After calling expand_start_else, turn this "else" into an "else if"
1817 by providing another condition. */
1819 void
1820 expand_elseif (cond)
1821 tree cond;
1823 cond_stack->data.cond.next_label = gen_label_rtx ();
1824 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1827 /* Generate RTL for the end of an if-then.
1828 Pop the record for it off of cond_stack. */
1830 void
1831 expand_end_cond ()
1833 struct nesting *thiscond = cond_stack;
1835 do_pending_stack_adjust ();
1836 if (thiscond->data.cond.next_label)
1837 emit_label (thiscond->data.cond.next_label);
1838 if (thiscond->data.cond.endif_label)
1839 emit_label (thiscond->data.cond.endif_label);
1841 POPSTACK (cond_stack);
1842 last_expr_type = 0;
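/* Editorial sketch (not part of the original source): the call sequence
   a front end makes for `if (a) s1; else if (b) s2; else s3;'.  */
#if 0
expand_start_cond (a, 0);	/* test `a'; jump to next_label if false */
/* ...expand s1... */
expand_start_elseif (b);	/* jump to endif; emit next_label; test `b' */
/* ...expand s2... */
expand_start_else ();		/* jump to endif; emit next_label */
/* ...expand s3... */
expand_end_cond ();		/* emit the remaining labels; pop cond_stack */
#endif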
1847 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1848 loop should be exited by `exit_something'. This is a loop for which
1849 `expand_continue' will jump to the top of the loop.
1851 Make an entry on loop_stack to record the labels associated with
1852 this loop. */
1854 struct nesting *
1855 expand_start_loop (exit_flag)
1856 int exit_flag;
1858 register struct nesting *thisloop = ALLOC_NESTING ();
1860 /* Make an entry on loop_stack for the loop we are entering. */
1862 thisloop->next = loop_stack;
1863 thisloop->all = nesting_stack;
1864 thisloop->depth = ++nesting_depth;
1865 thisloop->data.loop.start_label = gen_label_rtx ();
1866 thisloop->data.loop.end_label = gen_label_rtx ();
1867 thisloop->data.loop.alt_end_label = 0;
1868 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
1869 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
1870 loop_stack = thisloop;
1871 nesting_stack = thisloop;
1873 do_pending_stack_adjust ();
1874 emit_queue ();
1875 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
1876 emit_label (thisloop->data.loop.start_label);
1878 return thisloop;
1881 /* Like expand_start_loop but for a loop where the continuation point
1882 (for expand_continue_loop) will be specified explicitly. */
1884 struct nesting *
1885 expand_start_loop_continue_elsewhere (exit_flag)
1886 int exit_flag;
1888 struct nesting *thisloop = expand_start_loop (exit_flag);
1889 loop_stack->data.loop.continue_label = gen_label_rtx ();
1890 return thisloop;
1893 /* Specify the continuation point for a loop started with
1894 expand_start_loop_continue_elsewhere.
1895 Use this at the point in the code to which a continue statement
1896 should jump. */
1898 void
1899 expand_loop_continue_here ()
1901 do_pending_stack_adjust ();
1902 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
1903 emit_label (loop_stack->data.loop.continue_label);
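/* Editorial sketch (not part of the original source): expanding
   `for (init; cond; incr) body', where `continue' must jump to the
   increment rather than to the top of the loop.  */
#if 0
/* ...expand init... */
struct nesting *l = expand_start_loop_continue_elsewhere (1);
expand_exit_loop_if_false (l, cond);	/* the for-test */
/* ...expand body; `continue' jumps to the label emitted below... */
expand_loop_continue_here ();
/* ...expand incr... */
expand_end_loop ();			/* jump back to the top; emit end label */
#endif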
1906 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
1907 Pop the block off of loop_stack. */
1909 void
1910 expand_end_loop ()
1912 register rtx insn;
1913 register rtx start_label;
1914 rtx last_test_insn = 0;
1915 int num_insns = 0;
1917 insn = get_last_insn ();
1918 start_label = loop_stack->data.loop.start_label;
1920 /* Mark the continue-point at the top of the loop if none elsewhere. */
1921 if (start_label == loop_stack->data.loop.continue_label)
1922 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
1924 do_pending_stack_adjust ();
1926 /* If optimizing, perhaps reorder the loop. If the loop
1927 starts with a conditional exit, roll that to the end
1928 where it will optimize together with the jump back.
1930 We look for the last conditional branch to the exit that we encounter
1931 before hitting 30 insns or a CALL_INSN. If we see an unconditional
1932 branch to the exit first, use it.
1934 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
1935 because moving them is not valid. */
1937 if (optimize
1938 &&
1939 ! (GET_CODE (insn) == JUMP_INSN
1940 && GET_CODE (PATTERN (insn)) == SET
1941 && SET_DEST (PATTERN (insn)) == pc_rtx
1942 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
1944 /* Scan insns from the top of the loop looking for a qualified
1945 conditional exit. */
1946 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
1947 insn = NEXT_INSN (insn))
1949 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
1950 break;
1952 if (GET_CODE (insn) == NOTE
1953 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1954 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1955 break;
1957 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
1958 num_insns++;
1960 if (last_test_insn && num_insns > 30)
1961 break;
1963 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
1964 && SET_DEST (PATTERN (insn)) == pc_rtx
1965 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
1966 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
1967 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
1968 == loop_stack->data.loop.end_label)
1969 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
1970 == loop_stack->data.loop.alt_end_label)))
1971 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
1972 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
1973 == loop_stack->data.loop.end_label)
1974 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
1975 == loop_stack->data.loop.alt_end_label)))))
1976 last_test_insn = insn;
1978 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
1979 && GET_CODE (PATTERN (insn)) == SET
1980 && SET_DEST (PATTERN (insn)) == pc_rtx
1981 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
1982 && ((XEXP (SET_SRC (PATTERN (insn)), 0)
1983 == loop_stack->data.loop.end_label)
1984 || (XEXP (SET_SRC (PATTERN (insn)), 0)
1985 == loop_stack->data.loop.alt_end_label)))
1986 /* Include BARRIER. */
1987 last_test_insn = NEXT_INSN (insn);
1990 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
1992 /* We found one. Move everything from there up
1993 to the end of the loop, and add a jump into the loop
1994 to jump to there. */
1995 register rtx newstart_label = gen_label_rtx ();
1996 register rtx start_move = start_label;
1998 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
1999 then we want to move this note also. */
2000 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2001 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2002 == NOTE_INSN_LOOP_CONT))
2003 start_move = PREV_INSN (start_move);
2005 emit_label_after (newstart_label, PREV_INSN (start_move));
2006 reorder_insns (start_move, last_test_insn, get_last_insn ());
2007 emit_jump_insn_after (gen_jump (start_label),
2008 PREV_INSN (newstart_label));
2009 emit_barrier_after (PREV_INSN (newstart_label));
2010 start_label = newstart_label;
2014 emit_jump (start_label);
2015 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2016 emit_label (loop_stack->data.loop.end_label);
2018 POPSTACK (loop_stack);
2020 last_expr_type = 0;
2023 /* Generate a jump to the current loop's continue-point.
2024 This is usually the top of the loop, but may be specified
2025 explicitly elsewhere. If not currently inside a loop,
2026 return 0 and do nothing; caller will print an error message. */
2028 int
2029 expand_continue_loop (whichloop)
2030 struct nesting *whichloop;
2032 last_expr_type = 0;
2033 if (whichloop == 0)
2034 whichloop = loop_stack;
2035 if (whichloop == 0)
2036 return 0;
2037 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2038 NULL_RTX);
2039 return 1;
2042 /* Generate a jump to exit the current loop. If not currently inside a loop,
2043 return 0 and do nothing; caller will print an error message. */
2045 int
2046 expand_exit_loop (whichloop)
2047 struct nesting *whichloop;
2049 last_expr_type = 0;
2050 if (whichloop == 0)
2051 whichloop = loop_stack;
2052 if (whichloop == 0)
2053 return 0;
2054 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2055 return 1;
2058 /* Generate a conditional jump to exit the current loop if COND
2059 evaluates to zero. If not currently inside a loop,
2060 return 0 and do nothing; caller will print an error message. */
2062 int
2063 expand_exit_loop_if_false (whichloop, cond)
2064 struct nesting *whichloop;
2065 tree cond;
2067 rtx label = gen_label_rtx ();
2068 rtx last_insn;
2069 last_expr_type = 0;
2071 if (whichloop == 0)
2072 whichloop = loop_stack;
2073 if (whichloop == 0)
2074 return 0;
2075 /* In order to handle fixups, we actually create a conditional jump
2076 around an unconditional branch to exit the loop. If fixups are
2077 necessary, they go before the unconditional branch. */
2080 do_jump (cond, NULL_RTX, label);
2081 last_insn = get_last_insn ();
2082 if (GET_CODE (last_insn) == CODE_LABEL)
2083 whichloop->data.loop.alt_end_label = last_insn;
2084 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2085 NULL_RTX);
2086 emit_label (label);
2088 return 1;
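/* Editorial sketch (not part of the original source): a simple
   `while (cond) body' loop; passing 0 for WHICHLOOP selects the
   innermost loop on loop_stack.  */
#if 0
expand_start_loop (1);
expand_exit_loop_if_false (0, cond);	/* exit when `cond' is false */
/* ...expand body... */
expand_end_loop ();
#endif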
2091 /* Return non-zero if we should preserve sub-expressions as separate
2092 pseudos. We never do so if we aren't optimizing. We always do so
2093 if -fexpensive-optimizations.
2095 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2096 the loop may still be a small one. */
2098 int
2099 preserve_subexpressions_p ()
2101 rtx insn;
2103 if (flag_expensive_optimizations)
2104 return 1;
2106 if (optimize == 0 || loop_stack == 0)
2107 return 0;
2109 insn = get_last_insn_anywhere ();
2111 return (insn
2112 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2113 < n_non_fixed_regs * 3));
2117 /* Generate a jump to exit the current loop, conditional, binding contour
2118 or case statement. Not all such constructs are visible to this function,
2119 only those started with EXIT_FLAG nonzero. Individual languages use
2120 the EXIT_FLAG parameter to control which kinds of constructs you can
2121 exit this way.
2123 If not currently inside anything that can be exited,
2124 return 0 and do nothing; caller will print an error message. */
2126 int
2127 expand_exit_something ()
2129 struct nesting *n;
2130 last_expr_type = 0;
2131 for (n = nesting_stack; n; n = n->all)
2132 if (n->exit_label != 0)
2134 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2135 return 1;
2138 return 0;
2141 /* Generate RTL to return from the current function, with no value.
2142 (That is, we do not do anything about returning any value.) */
2144 void
2145 expand_null_return ()
2147 struct nesting *block = block_stack;
2148 rtx last_insn = 0;
2150 /* Does any pending block have cleanups? */
2152 while (block && block->data.block.cleanups == 0)
2153 block = block->next;
2155 /* If yes, use a goto to return, since that runs cleanups. */
2157 expand_null_return_1 (last_insn, block != 0);
2160 /* Generate RTL to return from the current function, with value VAL. */
2162 static void
2163 expand_value_return (val)
2164 rtx val;
2166 struct nesting *block = block_stack;
2167 rtx last_insn = get_last_insn ();
2168 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2170 /* Copy the value to the return location
2171 unless it's already there. */
2173 if (return_reg != val)
2175 #ifdef PROMOTE_FUNCTION_RETURN
2176 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2177 int unsignedp = TREE_UNSIGNED (type);
2178 enum machine_mode mode
2179 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2180 &unsignedp, 1);
2182 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2183 convert_move (return_reg, val, unsignedp);
2184 else
2185 #endif
2186 emit_move_insn (return_reg, val);
2188 if (GET_CODE (return_reg) == REG
2189 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2190 emit_insn (gen_rtx_USE (VOIDmode, return_reg));
2191 /* Handle calls that return values in multiple non-contiguous locations.
2192 The Irix 6 ABI has examples of this. */
2193 else if (GET_CODE (return_reg) == PARALLEL)
2195 int i;
2197 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2199 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2201 if (GET_CODE (x) == REG
2202 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2203 emit_insn (gen_rtx_USE (VOIDmode, x));
2207 /* Does any pending block have cleanups? */
2209 while (block && block->data.block.cleanups == 0)
2210 block = block->next;
2212 /* If yes, use a goto to return, since that runs cleanups.
2213 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2215 expand_null_return_1 (last_insn, block != 0);
2218 /* Output a return with no value. If LAST_INSN is nonzero,
2219 pretend that the return takes place after LAST_INSN.
2220 If USE_GOTO is nonzero then don't use a return instruction;
2221 go to the return label instead. This causes any cleanups
2222 of pending blocks to be executed normally. */
2224 static void
2225 expand_null_return_1 (last_insn, use_goto)
2226 rtx last_insn;
2227 int use_goto;
2229 rtx end_label = cleanup_label ? cleanup_label : return_label;
2231 clear_pending_stack_adjust ();
2232 do_pending_stack_adjust ();
2233 last_expr_type = 0;
2235 /* PCC-struct return always uses an epilogue. */
2236 if (current_function_returns_pcc_struct || use_goto)
2238 if (end_label == 0)
2239 end_label = return_label = gen_label_rtx ();
2240 expand_goto_internal (NULL_TREE, end_label, last_insn);
2241 return;
2244 /* Otherwise output a simple return-insn if one is available,
2245 unless it won't do the job. */
2246 #ifdef HAVE_return
2247 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2249 emit_jump_insn (gen_return ());
2250 emit_barrier ();
2251 return;
2253 #endif
2255 /* Otherwise jump to the epilogue. */
2256 expand_goto_internal (NULL_TREE, end_label, last_insn);
2259 /* Generate RTL to evaluate the expression RETVAL and return it
2260 from the current function. */
2262 void
2263 expand_return (retval)
2264 tree retval;
2266 /* If there are any cleanups to be performed, then they will
2267 be inserted following LAST_INSN. It is desirable
2268 that the last_insn, for such purposes, should be the
2269 last insn before computing the return value. Otherwise, cleanups
2270 which call functions can clobber the return value. */
2271 /* ??? rms: I think that is erroneous, because in C++ it would
2272 run destructors on variables that might be used in the subsequent
2273 computation of the return value. */
2274 rtx last_insn = 0;
2275 register rtx val = 0;
2276 register rtx op0;
2277 tree retval_rhs;
2278 int cleanups;
2280 /* If function wants no value, give it none. */
2281 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2283 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2284 emit_queue ();
2285 expand_null_return ();
2286 return;
2289 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2290 /* This is not sufficient. We also need to watch for cleanups of the
2291 expression we are about to expand. Unfortunately, we cannot know
2292 if it has cleanups until we expand it, and we want to change how we
2293 expand it depending upon if we need cleanups. We can't win. */
2294 #if 0
2295 cleanups = any_pending_cleanups (1);
2296 #else
2297 cleanups = 1;
2298 #endif
2300 if (TREE_CODE (retval) == RESULT_DECL)
2301 retval_rhs = retval;
2302 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2303 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2304 retval_rhs = TREE_OPERAND (retval, 1);
2305 else if (TREE_TYPE (retval) == void_type_node)
2306 /* Recognize tail-recursive call to void function. */
2307 retval_rhs = retval;
2308 else
2309 retval_rhs = NULL_TREE;
2311 /* Only use `last_insn' if there are cleanups which must be run. */
2312 if (cleanups || cleanup_label != 0)
2313 last_insn = get_last_insn ();
2315 /* Distribute return down conditional expr if either of the sides
2316 may involve tail recursion (see test below). This enhances the number
2317 of tail recursions we see. Don't always do this, since it can produce
2318 sub-optimal code in some cases and we distribute assignments into
2319 conditional expressions when it would help. */
2321 if (optimize && retval_rhs != 0
2322 && frame_offset == 0
2323 && TREE_CODE (retval_rhs) == COND_EXPR
2324 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2325 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2327 rtx label = gen_label_rtx ();
2328 tree expr;
2330 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2331 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2332 DECL_RESULT (current_function_decl),
2333 TREE_OPERAND (retval_rhs, 1));
2334 TREE_SIDE_EFFECTS (expr) = 1;
2335 expand_return (expr);
2336 emit_label (label);
2338 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2339 DECL_RESULT (current_function_decl),
2340 TREE_OPERAND (retval_rhs, 2));
2341 TREE_SIDE_EFFECTS (expr) = 1;
2342 expand_return (expr);
2343 return;
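/* Editorial illustration (not part of the original source): the
   distribution performed above rewrites a conditional return so that
   each arm can be examined separately for tail recursion.  */
#if 0
return c ? f (x) : g (x);	/* is expanded as if written: */
if (c)
  return f (x);
else
  return g (x);
#endif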
2346 /* For tail-recursive call to current function,
2347 just jump back to the beginning.
2348 It's unsafe if any auto variable in this function
2349 has its address taken; for simplicity,
2350 require stack frame to be empty. */
2351 if (optimize && retval_rhs != 0
2352 && frame_offset == 0
2353 && TREE_CODE (retval_rhs) == CALL_EXPR
2354 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2355 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2356 /* Finish checking validity, and if valid emit code
2357 to set the argument variables for the new call. */
2358 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2359 DECL_ARGUMENTS (current_function_decl)))
2361 if (tail_recursion_label == 0)
2363 tail_recursion_label = gen_label_rtx ();
2364 emit_label_after (tail_recursion_label,
2365 tail_recursion_reentry);
2367 emit_queue ();
2368 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2369 emit_barrier ();
2370 return;
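/* Editorial illustration (not part of the original source): a
   self-call that the code above can turn into a jump when optimizing.
   The frame is empty and no argument is addressable, so the call
   becomes "store the new n and acc into the parms, jump to the top".  */
#if 0
static int
sum (n, acc)
     int n, acc;
{
  if (n == 0)
    return acc;
  return sum (n - 1, acc + n);	/* expanded as a goto, not a call */
}
#endif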
2372 #ifdef HAVE_return
2373 /* This optimization is safe if there are local cleanups
2374 because expand_null_return takes care of them.
2375 ??? I think it should also be safe when there is a cleanup label,
2376 because expand_null_return takes care of them, too.
2377 Any reason why not? */
2378 if (HAVE_return && cleanup_label == 0
2379 && ! current_function_returns_pcc_struct
2380 && BRANCH_COST <= 1)
2382 /* If this is return x == y; then generate
2383 if (x == y) return 1; else return 0;
2384 if we can do it with explicit return insns and branches are cheap,
2385 but not if we have the corresponding scc insn. */
2386 int has_scc = 0;
2387 if (retval_rhs)
2388 switch (TREE_CODE (retval_rhs))
2390 case EQ_EXPR:
2391 #ifdef HAVE_seq
2392 has_scc = HAVE_seq;
2393 #endif
2394 case NE_EXPR:
2395 #ifdef HAVE_sne
2396 has_scc = HAVE_sne;
2397 #endif
2398 case GT_EXPR:
2399 #ifdef HAVE_sgt
2400 has_scc = HAVE_sgt;
2401 #endif
2402 case GE_EXPR:
2403 #ifdef HAVE_sge
2404 has_scc = HAVE_sge;
2405 #endif
2406 case LT_EXPR:
2407 #ifdef HAVE_slt
2408 has_scc = HAVE_slt;
2409 #endif
2410 case LE_EXPR:
2411 #ifdef HAVE_sle
2412 has_scc = HAVE_sle;
2413 #endif
2414 case TRUTH_ANDIF_EXPR:
2415 case TRUTH_ORIF_EXPR:
2416 case TRUTH_AND_EXPR:
2417 case TRUTH_OR_EXPR:
2418 case TRUTH_NOT_EXPR:
2419 case TRUTH_XOR_EXPR:
2420 if (! has_scc)
2422 op0 = gen_label_rtx ();
2423 jumpifnot (retval_rhs, op0);
2424 expand_value_return (const1_rtx);
2425 emit_label (op0);
2426 expand_value_return (const0_rtx);
2427 return;
2429 break;
2431 default:
2432 break;
2435 #endif /* HAVE_return */
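/* Editorial illustration (not part of the original source): with a
   return insn available, cheap branches (BRANCH_COST <= 1) and no scc
   insn for the comparison, the code above expands a returned
   comparison using explicit returns.  */
#if 0
return x == y;		/* is expanded as if written: */
if (x == y)
  return 1;
else
  return 0;
#endif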
2437 /* If the result is an aggregate that is being returned in one (or more)
2438 registers, load the registers here. The compiler currently can't handle
2439 copying a BLKmode value into registers. We could put this code in a
2440 more general area (for use by everyone instead of just function
2441 call/return), but until this feature is generally usable it is kept here
2442 (and in expand_call). The value must go into a pseudo in case there
2443 are cleanups that will clobber the real return register. */
2445 if (retval_rhs != 0
2446 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2447 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2449 int i, bitpos, xbitpos;
2450 int big_endian_correction = 0;
2451 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2452 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2453 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),BITS_PER_WORD);
2454 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2455 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2456 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2457 enum machine_mode tmpmode, result_reg_mode;
2459 /* Structures whose size is not a multiple of a word are aligned
2460 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2461 machine, this means we must skip the empty high order bytes when
2462 calculating the bit offset. */
2463 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2464 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2465 * BITS_PER_UNIT));
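/* Editorial worked example (not part of the original source): for a
   6-byte structure on a 32-bit big-endian target (UNITS_PER_WORD == 4,
   BITS_PER_UNIT == 8), bytes % UNITS_PER_WORD == 2, so the correction
   is 32 - 2 * 8 == 16; the copy below starts 16 bits into the
   destination bit numbering, right-justifying the value.  */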
2467 /* Copy the structure BITSIZE bits at a time. */
2468 for (bitpos = 0, xbitpos = big_endian_correction;
2469 bitpos < bytes * BITS_PER_UNIT;
2470 bitpos += bitsize, xbitpos += bitsize)
2472 /* We need a new destination pseudo each time xbitpos is
2473 on a word boundary and when xbitpos == big_endian_correction
2474 (the first time through). */
2475 if (xbitpos % BITS_PER_WORD == 0
2476 || xbitpos == big_endian_correction)
2478 /* Generate an appropriate register. */
2479 dst = gen_reg_rtx (word_mode);
2480 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2482 /* Clobber the destination before we move anything into it. */
2483 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
2486 /* We need a new source operand each time bitpos is on a word
2487 boundary. */
2488 if (bitpos % BITS_PER_WORD == 0)
2489 src = operand_subword_force (result_val,
2490 bitpos / BITS_PER_WORD,
2491 BLKmode);
2493 /* Use bitpos for the source extraction (left justified) and
2494 xbitpos for the destination store (right justified). */
2495 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2496 extract_bit_field (src, bitsize,
2497 bitpos % BITS_PER_WORD, 1,
2498 NULL_RTX, word_mode,
2499 word_mode,
2500 bitsize / BITS_PER_UNIT,
2501 BITS_PER_WORD),
2502 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2505 /* Find the smallest integer mode large enough to hold the
2506 entire structure and use that mode instead of BLKmode
2507 on the USE insn for the return register. */
2508 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2509 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2510 tmpmode != MAX_MACHINE_MODE;
2511 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2513 /* Have we found a large enough mode? */
2514 if (GET_MODE_SIZE (tmpmode) >= bytes)
2515 break;
2518 /* No suitable mode found. */
2519 if (tmpmode == MAX_MACHINE_MODE)
2520 abort ();
2522 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2524 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2525 result_reg_mode = word_mode;
2526 else
2527 result_reg_mode = tmpmode;
2528 result_reg = gen_reg_rtx (result_reg_mode);
2530 emit_queue ();
2531 for (i = 0; i < n_regs; i++)
2532 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2533 result_pseudos[i]);
2535 if (tmpmode != result_reg_mode)
2536 result_reg = gen_lowpart (tmpmode, result_reg);
2538 expand_value_return (result_reg);
2540 else if (cleanups
2541 && retval_rhs != 0
2542 && TREE_TYPE (retval_rhs) != void_type_node
2543 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2545 /* Calculate the return value into a pseudo reg. */
2546 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
2547 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2548 val = force_not_mem (val);
2549 emit_queue ();
2550 /* Return the calculated value, doing cleanups first. */
2551 expand_value_return (val);
2553 else
2555 /* No cleanups or no hard reg used;
2556 calculate value into hard return reg. */
2557 expand_expr (retval, const0_rtx, VOIDmode, 0);
2558 emit_queue ();
2559 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2563 /* Return 1 if the end of the generated RTX is not a barrier.
2564 This means code already compiled can drop through. */
2566 int
2567 drop_through_at_end_p ()
2569 rtx insn = get_last_insn ();
2570 while (insn && GET_CODE (insn) == NOTE)
2571 insn = PREV_INSN (insn);
2572 return insn && GET_CODE (insn) != BARRIER;
2575 /* Emit code to alter this function's formal parms for a tail-recursive call.
2576 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2577 FORMALS is the chain of decls of formals.
2578 Return 1 if this can be done;
2579 otherwise return 0 and do not emit any code. */
2581 static int
2582 tail_recursion_args (actuals, formals)
2583 tree actuals, formals;
2585 register tree a = actuals, f = formals;
2586 register int i;
2587 register rtx *argvec;
2589 /* Check that number and types of actuals are compatible
2590 with the formals. This is not always true in valid C code.
2591 Also check that no formal needs to be addressable
2592 and that all formals are scalars. */
2594 /* Also count the args. */
2596 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2598 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2599 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
2600 return 0;
2601 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2602 return 0;
2604 if (a != 0 || f != 0)
2605 return 0;
2607 /* Compute all the actuals. */
2609 argvec = (rtx *) alloca (i * sizeof (rtx));
2611 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2612 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2614 /* Find which actual values refer to current values of previous formals.
2615 Copy each of them now, before any formal is changed. */
2617 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2619 int copy = 0;
2620 register int j;
2621 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2622 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2623 { copy = 1; break; }
2624 if (copy)
2625 argvec[i] = copy_to_reg (argvec[i]);
2628 /* Store the values of the actuals into the formals. */
2630 for (f = formals, a = actuals, i = 0; f;
2631 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2633 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2634 emit_move_insn (DECL_RTL (f), argvec[i]);
2635 else
2636 convert_move (DECL_RTL (f), argvec[i],
2637 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2640 free_temp_slots ();
2641 return 1;
2644 /* Generate the RTL code for entering a binding contour.
2645 The variables are declared one by one, by calls to `expand_decl'.
2647 EXIT_FLAG is nonzero if this construct should be visible to
2648 `exit_something'. */
2650 void
2651 expand_start_bindings (exit_flag)
2652 int exit_flag;
2654 struct nesting *thisblock = ALLOC_NESTING ();
2655 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2657 /* Make an entry on block_stack for the block we are entering. */
2659 thisblock->next = block_stack;
2660 thisblock->all = nesting_stack;
2661 thisblock->depth = ++nesting_depth;
2662 thisblock->data.block.stack_level = 0;
2663 thisblock->data.block.cleanups = 0;
2664 thisblock->data.block.function_call_count = 0;
2665 thisblock->data.block.exception_region = 0;
2666 thisblock->data.block.target_temp_slot_level = target_temp_slot_level;
2668 thisblock->data.block.conditional_code = 0;
2669 thisblock->data.block.last_unconditional_cleanup = note;
2670 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
2672 if (block_stack
2673 && !(block_stack->data.block.cleanups == NULL_TREE
2674 && block_stack->data.block.outer_cleanups == NULL_TREE))
2675 thisblock->data.block.outer_cleanups
2676 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2677 block_stack->data.block.outer_cleanups);
2678 else
2679 thisblock->data.block.outer_cleanups = 0;
2680 thisblock->data.block.label_chain = 0;
2681 thisblock->data.block.innermost_stack_block = stack_block_stack;
2682 thisblock->data.block.first_insn = note;
2683 thisblock->data.block.block_start_count = ++block_start_count;
2684 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2685 block_stack = thisblock;
2686 nesting_stack = thisblock;
2688 /* Make a new level for allocating stack slots. */
2689 push_temp_slots ();
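/* Editorial sketch (not part of the original source): the usual call
   sequence for a C compound statement `{ int x = e; ...body... }'.
   DECL is the VAR_DECL for `x'; MARK_ENDS is 1 and DONT_JUMP_IN 0.  */
#if 0
expand_start_bindings (0);	/* emit NOTE_INSN_BLOCK_BEG; push the block */
expand_decl (decl);		/* allocate a reg or stack slot for `x' */
expand_decl_init (decl);	/* store `e' into it */
/* ...expand body... */
expand_end_bindings (decl, 1, 0);	/* run cleanups; emit BLOCK_END note */
#endif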
2692 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2693 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
2694 expand_expr is made. After we end the region, we know that all
2695 space for all temporaries that were created by TARGET_EXPRs will be
2696 destroyed and their space freed for reuse. */
2698 void
2699 expand_start_target_temps ()
2701 /* This is so that even if the result is preserved, the space
2702 allocated will be freed, as we know that it is no longer in use. */
2703 push_temp_slots ();
2705 /* Start a new binding layer that will keep track of all cleanup
2706 actions to be performed. */
2707 expand_start_bindings (0);
2709 target_temp_slot_level = temp_slot_level;
2712 void
2713 expand_end_target_temps ()
2715 expand_end_bindings (NULL_TREE, 0, 0);
2717 /* This is so that even if the result is preserved, the space
2718 allocated will be freed, as we know that it is no longer in use. */
2719 pop_temp_slots ();
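/* Editorial sketch (not part of the original source): the two routines
   above bracket a region of expression expansion.  */
#if 0
expand_start_target_temps ();
/* ...expand_expr calls that may create TARGET_EXPR temporaries... */
expand_end_target_temps ();	/* cleanups run; temporary slots freed */
#endif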
2722 /* Mark top block of block_stack as an implicit binding for an
2723 exception region. This is used to prevent infinite recursion when
2724 ending a binding with expand_end_bindings. It is only ever called
2725 by expand_eh_region_start, as that is the only way to create a
2726 block stack for an exception region. */
2728 void
2729 mark_block_as_eh_region ()
2731 block_stack->data.block.exception_region = 1;
2732 if (block_stack->next
2733 && block_stack->next->data.block.conditional_code)
2735 block_stack->data.block.conditional_code
2736 = block_stack->next->data.block.conditional_code;
2737 block_stack->data.block.last_unconditional_cleanup
2738 = block_stack->next->data.block.last_unconditional_cleanup;
2739 block_stack->data.block.cleanup_ptr
2740 = block_stack->next->data.block.cleanup_ptr;
2744 /* True if we are currently emitting insns in an area of output code
2745 that is controlled by a conditional expression. This is used by
2746 the cleanup handling code to generate conditional cleanup actions. */
2748 int
2749 conditional_context ()
2751 return block_stack && block_stack->data.block.conditional_code;
2754 /* Mark top block of block_stack as not for an implicit binding for an
2755 exception region. This is only ever done by expand_eh_region_end
2756 to let expand_end_bindings know that it is being called explicitly
2757 to end the binding layer for just the binding layer associated with
2758 the exception region; otherwise expand_end_bindings would try to
2759 end all implicit binding layers for exception regions, and then
2760 one normal binding layer. */
2762 void
2763 mark_block_as_not_eh_region ()
2765 block_stack->data.block.exception_region = 0;
2768 /* True if the top block of block_stack was marked as for an exception
2769 region by mark_block_as_eh_region. */
2771 int
2772 is_eh_region ()
2774 return block_stack && block_stack->data.block.exception_region;
2777 /* Given a pointer to a BLOCK node, save a pointer to the most recently
2778 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
2779 BLOCK node. */
2781 void
2782 remember_end_note (block)
2783 register tree block;
2785 BLOCK_END_NOTE (block) = last_block_end_note;
2786 last_block_end_note = NULL_RTX;
2789 /* Generate RTL code to terminate a binding contour.
2790 VARS is the chain of VAR_DECL nodes
2791 for the variables bound in this contour.
2792 MARK_ENDS is nonzero if we should put a note at the beginning
2793 and end of this binding contour.
2795 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2796 (That is true automatically if the contour has a saved stack level.) */
2798 void
2799 expand_end_bindings (vars, mark_ends, dont_jump_in)
2800 tree vars;
2801 int mark_ends;
2802 int dont_jump_in;
2804 register struct nesting *thisblock;
2805 register tree decl;
2807 while (block_stack->data.block.exception_region)
2809 /* Because we don't need or want a new temporary level and
2810 because we didn't create one in expand_eh_region_start,
2811 create a fake one now to avoid removing one in
2812 expand_end_bindings. */
2813 push_temp_slots ();
2815 block_stack->data.block.exception_region = 0;
2817 expand_end_bindings (NULL_TREE, 0, 0);
2820 /* Since expand_eh_region_start does an expand_start_bindings, we
2821 have to first end all the bindings that were created by
2822 expand_eh_region_start. */
2824 thisblock = block_stack;
2826 if (warn_unused)
2827 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2828 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
2829 && ! DECL_IN_SYSTEM_HEADER (decl)
2830 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
2831 warning_with_decl (decl, "unused variable `%s'");
2833 if (thisblock->exit_label)
2835 do_pending_stack_adjust ();
2836 emit_label (thisblock->exit_label);
2839 /* If necessary, make a handler for nonlocal gotos taking
2840 place in the function calls in this block. */
2841 if (function_call_count != thisblock->data.block.function_call_count
2842 && nonlocal_labels
2843 /* Make handler for outermost block
2844 if there were any nonlocal gotos to this function. */
2845 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2846 /* Make handler for inner block if it has something
2847 special to do when you jump out of it. */
2848 : (thisblock->data.block.cleanups != 0
2849 || thisblock->data.block.stack_level != 0)))
2851 tree link;
2852 rtx afterward = gen_label_rtx ();
2853 rtx handler_label = gen_label_rtx ();
2854 rtx save_receiver = gen_reg_rtx (Pmode);
2855 rtx insns;
2857 /* Don't let jump_optimize delete the handler. */
2858 LABEL_PRESERVE_P (handler_label) = 1;
2860 /* Record the handler address in the stack slot for that purpose,
2861 during this block, saving and restoring the outer value. */
2862 if (thisblock->next != 0)
2864 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2866 start_sequence ();
2867 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
2868 insns = get_insns ();
2869 end_sequence ();
2870 emit_insns_before (insns, thisblock->data.block.first_insn);
2873 start_sequence ();
2874 emit_move_insn (nonlocal_goto_handler_slot,
2875 gen_rtx_LABEL_REF (Pmode, handler_label));
2876 insns = get_insns ();
2877 end_sequence ();
2878 emit_insns_before (insns, thisblock->data.block.first_insn);
2880 /* Jump around the handler; it runs only when specially invoked. */
2881 emit_jump (afterward);
2882 emit_label (handler_label);
2884 #ifdef HAVE_nonlocal_goto
2885 if (! HAVE_nonlocal_goto)
2886 #endif
2887 /* First adjust our frame pointer to its actual value. It was
2888 previously set to the start of the virtual area corresponding to
2889 the stacked variables when we branched here and now needs to be
2890 adjusted to the actual hardware fp value.
2892 Assignments to virtual registers are converted by
2893 instantiate_virtual_regs into the corresponding assignment
2894 to the underlying register (fp in this case) that makes
2895 the original assignment true.
2896 So the following insn will actually be
2897 decrementing fp by STARTING_FRAME_OFFSET. */
2898 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
2900 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2901 if (fixed_regs[ARG_POINTER_REGNUM])
2903 #ifdef ELIMINABLE_REGS
2904 /* If the argument pointer can be eliminated in favor of the
2905 frame pointer, we don't need to restore it. We assume here
2906 that if such an elimination is present, it can always be used.
2907 This is the case on all known machines; if we don't make this
2908 assumption, we do unnecessary saving on many machines. */
2909 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
2910 size_t i;
2912 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
2913 if (elim_regs[i].from == ARG_POINTER_REGNUM
2914 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
2915 break;
2917 if (i == sizeof elim_regs / sizeof elim_regs [0])
2918 #endif
2920 /* Now restore our arg pointer from the address at which it
2921 was saved in our stack frame.
2922 If there hasn't been space allocated for it yet, make
2923 some now. */
2924 if (arg_pointer_save_area == 0)
2925 arg_pointer_save_area
2926 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
2927 emit_move_insn (virtual_incoming_args_rtx,
2928 /* We need a pseudo here, or else
2929 instantiate_virtual_regs_1 complains. */
2930 copy_to_reg (arg_pointer_save_area));
2933 #endif
2935 #ifdef HAVE_nonlocal_goto_receiver
2936 if (HAVE_nonlocal_goto_receiver)
2937 emit_insn (gen_nonlocal_goto_receiver ());
2938 #endif
2940 /* The handler expects the desired label address in the static chain
2941 register. It tests the address and does an appropriate jump
2942 to whatever label is desired. */
2943 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
2944 /* Skip any labels we shouldn't be able to jump to from here. */
2945 if (! DECL_TOO_LATE (TREE_VALUE (link)))
2947 rtx not_this = gen_label_rtx ();
2948 rtx this = gen_label_rtx ();
2949 do_jump_if_equal (static_chain_rtx,
2950 gen_rtx_LABEL_REF (Pmode, DECL_RTL (TREE_VALUE (link))),
2951 this, 0);
2952 emit_jump (not_this);
2953 emit_label (this);
2954 expand_goto (TREE_VALUE (link));
2955 emit_label (not_this);
2957 /* If label is not recognized, abort. */
2958 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
2959 VOIDmode, 0);
2960 emit_barrier ();
2961 emit_label (afterward);
2964 /* Don't allow jumping into a block that has a stack level.
2965 Cleanups are allowed, though. */
2966 if (dont_jump_in
2967 || thisblock->data.block.stack_level != 0)
2969 struct label_chain *chain;
2971 /* Any labels in this block are no longer valid to go to.
2972 Mark them to cause an error message. */
2973 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2975 DECL_TOO_LATE (chain->label) = 1;
2976 /* If any goto without a fixup came to this label,
2977 that must be an error, because gotos without fixups
2978 come from outside all saved stack-levels. */
2979 if (TREE_ADDRESSABLE (chain->label))
2980 error_with_decl (chain->label,
2981 "label `%s' used before containing binding contour");
2985 /* Restore stack level in effect before the block
2986 (only if variable-size objects allocated). */
2987 /* Perform any cleanups associated with the block. */
2989 if (thisblock->data.block.stack_level != 0
2990 || thisblock->data.block.cleanups != 0)
2992 /* Only clean up here if this point can actually be reached. */
2993 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
2995 /* Don't let cleanups affect ({...}) constructs. */
2996 int old_expr_stmts_for_value = expr_stmts_for_value;
2997 rtx old_last_expr_value = last_expr_value;
2998 tree old_last_expr_type = last_expr_type;
2999 expr_stmts_for_value = 0;
3001 /* Do the cleanups. */
3002 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3003 if (reachable)
3004 do_pending_stack_adjust ();
3006 expr_stmts_for_value = old_expr_stmts_for_value;
3007 last_expr_value = old_last_expr_value;
3008 last_expr_type = old_last_expr_type;
3010 /* Restore the stack level. */
3012 if (reachable && thisblock->data.block.stack_level != 0)
3014 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3015 thisblock->data.block.stack_level, NULL_RTX);
3016 if (nonlocal_goto_handler_slot != 0)
3017 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3018 NULL_RTX);
3021 /* Any gotos out of this block must also do these things.
3022 Also report any gotos with fixups that came to labels in this
3023 level. */
3024 fixup_gotos (thisblock,
3025 thisblock->data.block.stack_level,
3026 thisblock->data.block.cleanups,
3027 thisblock->data.block.first_insn,
3028 dont_jump_in);
3031 /* Mark the beginning and end of the scope if requested.
3032 We do this now, after running cleanups on the variables
3033 just going out of scope, so they are in scope for their cleanups. */
3035 if (mark_ends)
3036 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3037 else
3038 /* Get rid of the beginning-mark if we don't make an end-mark. */
3039 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3041 /* If doing stupid register allocation, make sure lives of all
3042 register variables declared here extend thru end of scope. */
3044 if (obey_regdecls)
3045 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3047 rtx rtl = DECL_RTL (decl);
3048 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3049 use_variable (rtl);
3052 /* Restore the temporary level of TARGET_EXPRs. */
3053 target_temp_slot_level = thisblock->data.block.target_temp_slot_level;
3055 /* Restore block_stack level for containing block. */
3057 stack_block_stack = thisblock->data.block.innermost_stack_block;
3058 POPSTACK (block_stack);
3060 /* Pop the stack slot nesting and free any slots at this level. */
3061 pop_temp_slots ();
3066 /* Generate RTL for the automatic variable declaration DECL.
3067 (Other kinds of declarations are simply ignored if seen here.) */
3069 void
3070 expand_decl (decl)
3071 register tree decl;
3073 struct nesting *thisblock = block_stack;
3074 tree type;
3076 type = TREE_TYPE (decl);
3078 /* Only automatic variables need any expansion done.
3079 Static and external variables, and external functions,
3080 will be handled by `assemble_variable' (called from finish_decl).
3081 TYPE_DECL and CONST_DECL require nothing.
3082 PARM_DECLs are handled in `assign_parms'. */
3084 if (TREE_CODE (decl) != VAR_DECL)
3085 return;
3086 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3087 return;
3089 /* Create the RTL representation for the variable. */
3091 if (type == error_mark_node)
3092 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3093 else if (DECL_SIZE (decl) == 0)
3094 /* Variable with incomplete type. */
3096 if (DECL_INITIAL (decl) == 0)
3097 /* Error message was already done; now avoid a crash. */
3098 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3099 else
3100 /* An initializer is going to decide the size of this array.
3101 Until we know the size, represent its address with a reg. */
3102 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3103 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
3105 else if (DECL_MODE (decl) != BLKmode
3106 /* If -ffloat-store, don't put explicit float vars
3107 into regs. */
3108 && !(flag_float_store
3109 && TREE_CODE (type) == REAL_TYPE)
3110 && ! TREE_THIS_VOLATILE (decl)
3111 && ! TREE_ADDRESSABLE (decl)
3112 && (DECL_REGISTER (decl) || ! obey_regdecls)
3113 /* if -fcheck-memory-usage, check all variables. */
3114 && ! flag_check_memory_usage)
3116 /* Automatic variable that can go in a register. */
3117 int unsignedp = TREE_UNSIGNED (type);
3118 enum machine_mode reg_mode
3119 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3121 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3122 mark_user_reg (DECL_RTL (decl));
3124 if (POINTER_TYPE_P (type))
3125 mark_reg_pointer (DECL_RTL (decl),
3126 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3127 / BITS_PER_UNIT));
3130 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3131 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3132 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3133 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3134 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
3136 /* Variable of fixed size that goes on the stack. */
3137 rtx oldaddr = 0;
3138 rtx addr;
3140 /* If we previously made RTL for this decl, it must be an array
3141 whose size was determined by the initializer.
3142 The old address was a register; set that register now
3143 to the proper address. */
3144 if (DECL_RTL (decl) != 0)
3146 if (GET_CODE (DECL_RTL (decl)) != MEM
3147 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3148 abort ();
3149 oldaddr = XEXP (DECL_RTL (decl), 0);
3152 DECL_RTL (decl)
3153 = assign_stack_temp (DECL_MODE (decl),
3154 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3155 + BITS_PER_UNIT - 1)
3156 / BITS_PER_UNIT),
3157 1);
3158 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3160 /* Set alignment we actually gave this decl. */
3161 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3162 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3164 if (oldaddr)
3166 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3167 if (addr != oldaddr)
3168 emit_move_insn (oldaddr, addr);
3171 /* If this is a memory ref that contains aggregate components,
3172 mark it as such for cse and loop optimize. */
3173 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3174 #if 0
3175 /* If this is in memory because of -ffloat-store,
3176 set the volatile bit, to prevent optimizations from
3177 undoing the effects. */
3178 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3179 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3180 #endif
3182 MEM_ALIAS_SET (DECL_RTL (decl)) = get_alias_set (decl);
3184 else
3185 /* Dynamic-size object: must push space on the stack. */
3187 rtx address, size;
3189 /* Record the stack pointer on entry to block, if have
3190 not already done so. */
3191 if (thisblock->data.block.stack_level == 0)
3193 do_pending_stack_adjust ();
3194 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3195 &thisblock->data.block.stack_level,
3196 thisblock->data.block.first_insn);
3197 stack_block_stack = thisblock;
3200 /* Compute the variable's size, in bytes. */
3201 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3202 DECL_SIZE (decl),
3203 size_int (BITS_PER_UNIT)),
3204 NULL_RTX, VOIDmode, 0);
3205 free_temp_slots ();
3207 /* Allocate space on the stack for the variable. Note that
3208 DECL_ALIGN says how the variable is to be aligned and we
3209 cannot use it to conclude anything about the alignment of
3210 the size. */
3211 address = allocate_dynamic_stack_space (size, NULL_RTX,
3212 TYPE_ALIGN (TREE_TYPE (decl)));
3214 /* Reference the variable indirect through that rtx. */
3215 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3217 /* If this is a memory ref that contains aggregate components,
3218 mark it as such for cse and loop optimize. */
3219 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3221 /* Indicate the alignment we actually gave this variable. */
3222 #ifdef STACK_BOUNDARY
3223 DECL_ALIGN (decl) = STACK_BOUNDARY;
3224 #else
3225 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3226 #endif
3229 if (TREE_THIS_VOLATILE (decl))
3230 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3231 #if 0 /* A variable is not necessarily unchanging
3232 just because it is const. RTX_UNCHANGING_P
3233 means no change in the function,
3234 not merely no change in the variable's scope.
3235 It is correct to set RTX_UNCHANGING_P if the variable's scope
3236 is the whole function. There's no convenient way to test that. */
3237 if (TREE_READONLY (decl))
3238 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3239 #endif
3241 /* If doing stupid register allocation, make sure life of any
3242 register variable starts here, at the start of its scope. */
3244 if (obey_regdecls)
3245 use_variable (DECL_RTL (decl));
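/* Editorial illustration (not part of the original source): a
   declaration that takes the dynamic-size path above.  The stack level
   is saved on entry to the block and restored by expand_end_bindings.  */
#if 0
void
f (n)
     int n;
{
  char buf[n];	/* run-time size: allocate_dynamic_stack_space */
  /* ... */
}
#endif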
3250 /* Emit code to perform the initialization of a declaration DECL. */
3252 void
3253 expand_decl_init (decl)
3254 tree decl;
3256 int was_used = TREE_USED (decl);
3258 /* If this is a CONST_DECL, we don't have to generate any code, but
3259 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3260 to be set while in the obstack containing the constant. If we don't
3261 do this, we can lose if we have functions nested three deep and the middle
3262 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3263 the innermost function is the first to expand that STRING_CST. */
3264 if (TREE_CODE (decl) == CONST_DECL)
3266 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3267 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3268 EXPAND_INITIALIZER);
3269 return;
3272 if (TREE_STATIC (decl))
3273 return;
3275 /* Compute and store the initial value now. */
3277 if (DECL_INITIAL (decl) == error_mark_node)
3279 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3281 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3282 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3283 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3284 0, 0);
3285 emit_queue ();
3287 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3289 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3290 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3291 emit_queue ();
3294 /* Don't let the initialization count as "using" the variable. */
3295 TREE_USED (decl) = was_used;
3297 /* Free any temporaries we made while initializing the decl. */
3298 preserve_temp_slots (NULL_RTX);
3299 free_temp_slots ();
3302 /* CLEANUP is an expression to be executed at exit from this binding contour;
3303 for example, in C++, it might call the destructor for this variable.
3305 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3306 CLEANUP multiple times, and have the correct semantics. This
3307 happens in exception handling, for gotos, returns, and breaks that
3308 leave the current scope.
3310 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3311 that is not associated with any particular variable. */
3313 int
3314 expand_decl_cleanup (decl, cleanup)
3315 tree decl, cleanup;
3317 struct nesting *thisblock = block_stack;
3319 /* Error if we are not in any block. */
3320 if (thisblock == 0)
3321 return 0;
3323 /* Record the cleanup if there is one. */
3325 if (cleanup != 0)
3327 tree t;
3328 rtx seq;
3329 tree *cleanups = &thisblock->data.block.cleanups;
3330 int cond_context = conditional_context ();
3332 if (cond_context)
3334 rtx flag = gen_reg_rtx (word_mode);
3335 rtx set_flag_0;
3336 tree cond;
3338 start_sequence ();
3339 emit_move_insn (flag, const0_rtx);
3340 set_flag_0 = get_insns ();
3341 end_sequence ();
3343 thisblock->data.block.last_unconditional_cleanup
3344 = emit_insns_after (set_flag_0,
3345 thisblock->data.block.last_unconditional_cleanup);
3347 emit_move_insn (flag, const1_rtx);
3349 /* All cleanups must be on the function_obstack. */
3350 push_obstacks_nochange ();
3351 resume_temporary_allocation ();
3353 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3354 DECL_RTL (cond) = flag;
3356 /* Conditionalize the cleanup. */
3357 cleanup = build (COND_EXPR, void_type_node,
3358 truthvalue_conversion (cond),
3359 cleanup, integer_zero_node);
3360 cleanup = fold (cleanup);
3362 pop_obstacks ();
3364 cleanups = thisblock->data.block.cleanup_ptr;
3367 /* All cleanups must be on the function_obstack. */
3368 push_obstacks_nochange ();
3369 resume_temporary_allocation ();
3370 cleanup = unsave_expr (cleanup);
3371 pop_obstacks ();
3373 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
3375 if (! cond_context)
3376 /* If this block has a cleanup, it belongs in stack_block_stack. */
3377 stack_block_stack = thisblock;
3379 if (cond_context)
3381 start_sequence ();
3384 /* If this was optimized so that there is no exception region for the
3385 cleanup, then mark the TREE_LIST node, so that we can later tell
3386 if we need to call expand_eh_region_end. */
3387 if (! using_eh_for_cleanups_p
3388 || expand_eh_region_start_tree (decl, cleanup))
3389 TREE_ADDRESSABLE (t) = 1;
3390 /* If that started a new EH region, we're in a new block. */
3391 thisblock = block_stack;
3393 if (cond_context)
3395 seq = get_insns ();
3396 end_sequence ();
3397 if (seq)
3398 thisblock->data.block.last_unconditional_cleanup
3399 = emit_insns_after (seq,
3400 thisblock->data.block.last_unconditional_cleanup);
3402 else
3404 thisblock->data.block.last_unconditional_cleanup
3405 = get_last_insn ();
3406 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3409 return 1;
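/* Editorial sketch (not part of the original source): how a C++ front
   end might register a destructor call as a cleanup for a local object.
   `dtor_call' is a hypothetical tree for `obj.~T ()'.  */
#if 0
expand_decl (obj);
expand_decl_init (obj);
expand_decl_cleanup (obj, dtor_call);	/* run on every exit from the block */
#endif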
3412 /* Like expand_decl_cleanup, but suppress generating an exception handler
3413 to perform the cleanup. */
3415 int
3416 expand_decl_cleanup_no_eh (decl, cleanup)
3417 tree decl, cleanup;
3419 int save_eh = using_eh_for_cleanups_p;
3420 int result;
3422 using_eh_for_cleanups_p = 0;
3423 result = expand_decl_cleanup (decl, cleanup);
3424 using_eh_for_cleanups_p = save_eh;
3426 return result;
3429 /* Arrange for the top element of the dynamic cleanup chain to be
3430 popped if we exit the current binding contour. DECL is the
3431 associated declaration, if any, otherwise NULL_TREE. If the
3432 current contour is left via an exception, then __sjthrow will pop
3433 the top element off the dynamic cleanup chain. The code that
3434 avoids doing the action we push into the cleanup chain in the
3435 exceptional case is contained in expand_cleanups.
3437 This routine is only used by expand_eh_region_start, and that is
3438 the only way in which an exception region should be started. This
3439 routine is only used when using the setjmp/longjmp codegen method
3440 for exception handling. */
3442 int
3443 expand_dcc_cleanup (decl)
3444 tree decl;
3446 struct nesting *thisblock = block_stack;
3447 tree cleanup;
3449 /* Error if we are not in any block. */
3450 if (thisblock == 0)
3451 return 0;
3453 /* Record the cleanup for the dynamic handler chain. */
3455 /* All cleanups must be on the function_obstack. */
3456 push_obstacks_nochange ();
3457 resume_temporary_allocation ();
3458 cleanup = make_node (POPDCC_EXPR);
3459 pop_obstacks ();
3461 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3462 thisblock->data.block.cleanups
3463 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3465 /* If this block has a cleanup, it belongs in stack_block_stack. */
3466 stack_block_stack = thisblock;
3467 return 1;
3470 /* Arrange for the top element of the dynamic handler chain to be
3471 popped if we exit the current binding contour. DECL is the
3472 associated declaration, if any, otherwise NULL_TREE. If the current
3473 contour is left via an exception, then __sjthrow will pop the top
3474 element off the dynamic handler chain. The code that avoids doing
3475 the action we push into the handler chain in the exceptional case
3476 is contained in expand_cleanups.
3478 This routine is only used by expand_eh_region_start, and that is
3479 the only way in which an exception region should be started. This
3480 routine is only used when using the setjmp/longjmp codegen method
3481 for exception handling. */
3484 expand_dhc_cleanup (decl)
3485 tree decl;
3487 struct nesting *thisblock = block_stack;
3488 tree cleanup;
3490 /* Error if we are not in any block. */
3491 if (thisblock == 0)
3492 return 0;
3494 /* Record the cleanup for the dynamic handler chain. */
3496 /* All cleanups must be on the function_obstack. */
3497 push_obstacks_nochange ();
3498 resume_temporary_allocation ();
3499 cleanup = make_node (POPDHC_EXPR);
3500 pop_obstacks ();
3502 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3503 thisblock->data.block.cleanups
3504 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3506 /* If this block has a cleanup, it belongs in stack_block_stack. */
3507 stack_block_stack = thisblock;
3508 return 1;
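/* Standalone sketch (all names hypothetical) of the dynamic chain
   that the POPDCC_EXPR/POPDHC_EXPR cleanups keep balanced: a stack of
   records that normal exits pop one at a time and that __sjthrow
   walks wholesale when unwinding.  */
#if 0
struct chain_node
{
  struct chain_node *next;
  void (*cleanup) (void *);
  void *arg;
};
static struct chain_node *chain_top;

static void
chain_push (n, fn, a)
     struct chain_node *n;
     void (*fn) (void *);
     void *a;
{
  n->cleanup = fn;
  n->arg = a;
  n->next = chain_top;
  chain_top = n;			/* a normal exit pops exactly this */
}

static void
chain_unwind ()				/* what the throw path does */
{
  while (chain_top)
    {
      struct chain_node *n = chain_top;
      chain_top = n->next;		/* pop before running the cleanup */
      n->cleanup (n->arg);
    }
}
#endif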
3511 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3512 DECL_ELTS is the list of elements that belong to DECL's type.
3513 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3515 void
3516 expand_anon_union_decl (decl, cleanup, decl_elts)
3517 tree decl, cleanup, decl_elts;
3519 struct nesting *thisblock = block_stack;
3520 rtx x;
3522 expand_decl (decl);
3523 expand_decl_cleanup (decl, cleanup);
3524 x = DECL_RTL (decl);
3526 while (decl_elts)
3528 tree decl_elt = TREE_VALUE (decl_elts);
3529 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3530 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3532 /* Propagate the union's alignment to the elements. */
3533 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3535 /* If the element has BLKmode and the union doesn't, the union is
3536 aligned such that the element doesn't need to have BLKmode, so
3537 change the element's mode to the appropriate one for its size. */
3538 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3539 DECL_MODE (decl_elt) = mode
3540 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3541 MODE_INT, 1);
3543 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3544 instead create a new MEM rtx with the proper mode. */
3545 if (GET_CODE (x) == MEM)
3547 if (mode == GET_MODE (x))
3548 DECL_RTL (decl_elt) = x;
3549 else
3551 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
3552 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3553 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3556 else if (GET_CODE (x) == REG)
3558 if (mode == GET_MODE (x))
3559 DECL_RTL (decl_elt) = x;
3560 else
3561 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
3563 else
3564 abort ();
3566 /* Record the cleanup if there is one. */
3568 if (cleanup != 0)
3569 thisblock->data.block.cleanups
3570 = temp_tree_cons (decl_elt, cleanup_elt,
3571 thisblock->data.block.cleanups);
3573 decl_elts = TREE_CHAIN (decl_elts);
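/* Source-level sketch of what the loop above arranges (as e.g. for
   C++ anonymous unions): every member shares the union's single
   slot, re-typed per member via a narrower MEM or a SUBREG of the
   union's REG.  */
#if 0
void
f ()
{
  union { int i; float flt; };	/* anonymous union */
  i = 1;			/* i and flt name the same storage, */
  flt = 2.0f;			/* each in its own machine mode */
}
#endif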
3577 /* Expand a list of cleanups LIST.
3578 Elements may be expressions or may be nested lists.
3580 If DONT_DO is nonnull, then any list-element
3581 whose TREE_PURPOSE matches DONT_DO is omitted.
3582 This is sometimes used to avoid a cleanup associated with
3583 a value that is being returned out of the scope.
3585 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
3586 goto and handle protection regions specially in that case.
3588 If REACHABLE, we emit code, otherwise just inform the exception handling
3589 code about this finalization. */
3591 static void
3592 expand_cleanups (list, dont_do, in_fixup, reachable)
3593 tree list;
3594 tree dont_do;
3595 int in_fixup;
3596 int reachable;
3598 tree tail;
3599 for (tail = list; tail; tail = TREE_CHAIN (tail))
3600 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3602 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3603 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
3604 else
3606 if (! in_fixup)
3608 tree cleanup = TREE_VALUE (tail);
3610 /* See expand_d{h,c}c_cleanup for why we avoid this. */
3611 if (TREE_CODE (cleanup) != POPDHC_EXPR
3612 && TREE_CODE (cleanup) != POPDCC_EXPR
3613 /* See expand_eh_region_start_tree for this case. */
3614 && ! TREE_ADDRESSABLE (tail))
3616 cleanup = protect_with_terminate (cleanup);
3617 expand_eh_region_end (cleanup);
3621 if (reachable)
3623 /* Cleanups may be run multiple times. For example,
3624 when exiting a binding contour, we expand the
3625 cleanups associated with that contour. When a goto
3626 within that binding contour has a target outside that
3627 contour, it will expand all cleanups from its scope to
3628 the target. Though the cleanups are expanded multiple
3629 times, the control paths are non-overlapping so the
3630 cleanups will not be executed twice. */
3632 /* We may need to protect fixups with rethrow regions. */
3633 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
3635 if (protect)
3636 expand_fixup_region_start ();
3638 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3639 if (protect)
3640 expand_fixup_region_end (TREE_VALUE (tail));
3641 free_temp_slots ();
3647 /* Mark the context we are emitting RTL for as a conditional
3648 context, so that any cleanup actions we register with
3649 expand_decl_init will be properly conditionalized when those
3650 cleanup actions are later performed. Must be called before any
3651 expression (tree) is expanded that is within a conditional context. */
3653 void
3654 start_cleanup_deferral ()
3656 /* block_stack can be NULL if we are inside the parameter list. It is
3657 OK to do nothing, because cleanups aren't possible here. */
3658 if (block_stack)
3659 ++block_stack->data.block.conditional_code;
3662 /* Mark the end of a conditional region of code. Because cleanup
3663 deferrals may be nested, we may still be in a conditional region
3664 after we end the currently deferred cleanups; only after we end all
3665 deferred cleanups are we back in unconditional code. */
3667 void
3668 end_cleanup_deferral ()
3670 /* block_stack can be NULL if we are inside the parameter list. It is
3671 OK to do nothing, because cleanups aren't possible here. */
3672 if (block_stack)
3673 --block_stack->data.block.conditional_code;
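/* Illustrative sketch (hypothetical calls): deferrals nest like a
   counter, so only the outermost end_cleanup_deferral returns us to
   unconditional code.  */
#if 0
  start_cleanup_deferral ();	/* conditional_code == 1 */
  start_cleanup_deferral ();	/* conditional_code == 2, still deferred */
  end_cleanup_deferral ();	/* conditional_code == 1, still deferred */
  end_cleanup_deferral ();	/* conditional_code == 0, unconditional */
#endif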
3676 /* Move all cleanups from the current block_stack
3677 to the containing block_stack, where they are assumed to
3678 have been created. If anything can cause a temporary to
3679 be created but not expanded for more than one level of
3680 block_stacks, then this code will have to change. */
3682 void
3683 move_cleanups_up ()
3685 struct nesting *block = block_stack;
3686 struct nesting *outer = block->next;
3688 outer->data.block.cleanups
3689 = chainon (block->data.block.cleanups,
3690 outer->data.block.cleanups);
3691 block->data.block.cleanups = 0;
3694 tree
3695 last_cleanup_this_contour ()
3697 if (block_stack == 0)
3698 return 0;
3700 return block_stack->data.block.cleanups;
3703 /* Return 1 if there are any pending cleanups at this point.
3704 If THIS_CONTOUR is nonzero, check the current contour as well.
3705 Otherwise, look only at the contours that enclose this one. */
3708 any_pending_cleanups (this_contour)
3709 int this_contour;
3711 struct nesting *block;
3713 if (block_stack == 0)
3714 return 0;
3716 if (this_contour && block_stack->data.block.cleanups != NULL)
3717 return 1;
3718 if (block_stack->data.block.cleanups == 0
3719 && block_stack->data.block.outer_cleanups == 0)
3720 return 0;
3722 for (block = block_stack->next; block; block = block->next)
3723 if (block->data.block.cleanups != 0)
3724 return 1;
3726 return 0;
3729 /* Enter a case (Pascal) or switch (C) statement.
3730 Push a block onto case_stack and nesting_stack
3731 to accumulate the case-labels that are seen
3732 and to record the labels generated for the statement.
3734 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3735 Otherwise, this construct is transparent for `exit_something'.
3737 EXPR is the index-expression to be dispatched on.
3738 TYPE is its nominal type. We could simply convert EXPR to this type,
3739 but instead we take short cuts. */
3741 void
3742 expand_start_case (exit_flag, expr, type, printname)
3743 int exit_flag;
3744 tree expr;
3745 tree type;
3746 char *printname;
3748 register struct nesting *thiscase = ALLOC_NESTING ();
3750 /* Make an entry on case_stack for the case we are entering. */
3752 thiscase->next = case_stack;
3753 thiscase->all = nesting_stack;
3754 thiscase->depth = ++nesting_depth;
3755 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3756 thiscase->data.case_stmt.case_list = 0;
3757 thiscase->data.case_stmt.index_expr = expr;
3758 thiscase->data.case_stmt.nominal_type = type;
3759 thiscase->data.case_stmt.default_label = 0;
3760 thiscase->data.case_stmt.num_ranges = 0;
3761 thiscase->data.case_stmt.printname = printname;
3762 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
3763 case_stack = thiscase;
3764 nesting_stack = thiscase;
3766 do_pending_stack_adjust ();
3768 /* Make sure case_stmt.start points to something that won't
3769 need any transformation before expand_end_case. */
3770 if (GET_CODE (get_last_insn ()) != NOTE)
3771 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3773 thiscase->data.case_stmt.start = get_last_insn ();
3775 start_cleanup_deferral ();
3779 /* Start a "dummy case statement" within which case labels are invalid
3780 and are not connected to any larger real case statement.
3781 This can be used if you don't want to let a case statement jump
3782 into the middle of certain kinds of constructs. */
3784 void
3785 expand_start_case_dummy ()
3787 register struct nesting *thiscase = ALLOC_NESTING ();
3789 /* Make an entry on case_stack for the dummy. */
3791 thiscase->next = case_stack;
3792 thiscase->all = nesting_stack;
3793 thiscase->depth = ++nesting_depth;
3794 thiscase->exit_label = 0;
3795 thiscase->data.case_stmt.case_list = 0;
3796 thiscase->data.case_stmt.start = 0;
3797 thiscase->data.case_stmt.nominal_type = 0;
3798 thiscase->data.case_stmt.default_label = 0;
3799 thiscase->data.case_stmt.num_ranges = 0;
3800 case_stack = thiscase;
3801 nesting_stack = thiscase;
3802 start_cleanup_deferral ();
3805 /* End a dummy case statement. */
3807 void
3808 expand_end_case_dummy ()
3810 end_cleanup_deferral ();
3811 POPSTACK (case_stack);
3814 /* Return the data type of the index-expression
3815 of the innermost case statement, or null if none. */
3817 tree
3818 case_index_expr_type ()
3820 if (case_stack)
3821 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3822 return 0;
3825 static void
3826 check_seenlabel ()
3828 /* If this is the first label, warn if any insns have been emitted. */
3829 if (case_stack->data.case_stmt.line_number_status >= 0)
3831 rtx insn;
3833 restore_line_number_status
3834 (case_stack->data.case_stmt.line_number_status);
3835 case_stack->data.case_stmt.line_number_status = -1;
3837 for (insn = case_stack->data.case_stmt.start;
3838 insn;
3839 insn = NEXT_INSN (insn))
3841 if (GET_CODE (insn) == CODE_LABEL)
3842 break;
3843 if (GET_CODE (insn) != NOTE
3844 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3847 insn = PREV_INSN (insn);
3848 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
3850 /* If insn is zero, then there must have been a syntax error. */
3851 if (insn)
3852 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
3853 NOTE_LINE_NUMBER (insn),
3854 "unreachable code at beginning of %s",
3855 case_stack->data.case_stmt.printname);
3856 break;
3862 /* Accumulate one case or default label inside a case or switch statement.
3863 VALUE is the value of the case (a null pointer, for a default label).
3864 The function CONVERTER, when applied to arguments T and V,
3865 converts the value V to the type T.
3867 If not currently inside a case or switch statement, return 1 and do
3868 nothing. The caller will print a language-specific error message.
3869 If VALUE is a duplicate or overlaps, return 2 and do nothing
3870 except store the (first) duplicate node in *DUPLICATE.
3871 If VALUE is out of range, return 3 and do nothing.
3872 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3873 Return 0 on success.
3875 Extended to handle range statements. */
3878 pushcase (value, converter, label, duplicate)
3879 register tree value;
3880 tree (*converter) PROTO((tree, tree));
3881 register tree label;
3882 tree *duplicate;
3884 tree index_type;
3885 tree nominal_type;
3887 /* Fail if not inside a real case statement. */
3888 if (! (case_stack && case_stack->data.case_stmt.start))
3889 return 1;
3891 if (stack_block_stack
3892 && stack_block_stack->depth > case_stack->depth)
3893 return 5;
3895 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3896 nominal_type = case_stack->data.case_stmt.nominal_type;
3898 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3899 if (index_type == error_mark_node)
3900 return 0;
3902 /* Convert VALUE to the type in which the comparisons are nominally done. */
3903 if (value != 0)
3904 value = (*converter) (nominal_type, value);
3906 check_seenlabel ();
3908 /* Fail if this value is out of range for the actual type of the index
3909 (which may be narrower than NOMINAL_TYPE). */
3910 if (value != 0 && ! int_fits_type_p (value, index_type))
3911 return 3;
3913 /* Fail if this is a duplicate or overlaps another entry. */
3914 if (value == 0)
3916 if (case_stack->data.case_stmt.default_label != 0)
3918 *duplicate = case_stack->data.case_stmt.default_label;
3919 return 2;
3921 case_stack->data.case_stmt.default_label = label;
3923 else
3924 return add_case_node (value, value, label, duplicate);
3926 expand_label (label);
3927 return 0;
3930 /* Like pushcase but this case applies to all values between VALUE1 and
3931 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
3932 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
3933 starts at VALUE1 and ends at the highest value of the index type.
3934 If both are NULL, this case applies to all values.
3936 The return value is the same as that of pushcase but there is one
3937 additional error code: 4 means the specified range was empty. */
3940 pushcase_range (value1, value2, converter, label, duplicate)
3941 register tree value1, value2;
3942 tree (*converter) PROTO((tree, tree));
3943 register tree label;
3944 tree *duplicate;
3946 tree index_type;
3947 tree nominal_type;
3949 /* Fail if not inside a real case statement. */
3950 if (! (case_stack && case_stack->data.case_stmt.start))
3951 return 1;
3953 if (stack_block_stack
3954 && stack_block_stack->depth > case_stack->depth)
3955 return 5;
3957 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3958 nominal_type = case_stack->data.case_stmt.nominal_type;
3960 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3961 if (index_type == error_mark_node)
3962 return 0;
3964 check_seenlabel ();
3966 /* Convert VALUEs to type in which the comparisons are nominally done
3967 and replace any unspecified value with the corresponding bound. */
3968 if (value1 == 0)
3969 value1 = TYPE_MIN_VALUE (index_type);
3970 if (value2 == 0)
3971 value2 = TYPE_MAX_VALUE (index_type);
3973 /* Fail if the range is empty. Do this before any conversion since
3974 we want to allow out-of-range empty ranges. */
3975 if (value2 && tree_int_cst_lt (value2, value1))
3976 return 4;
3978 value1 = (*converter) (nominal_type, value1);
3980 /* If the max was unbounded, use the max of the nominal_type we are
3981 converting to. Do this after the < check above to suppress false
3982 positives. */
3983 if (!value2)
3984 value2 = TYPE_MAX_VALUE (nominal_type);
3985 value2 = (*converter) (nominal_type, value2);
3987 /* Fail if these values are out of range. */
3988 if (TREE_CONSTANT_OVERFLOW (value1)
3989 || ! int_fits_type_p (value1, index_type))
3990 return 3;
3992 if (TREE_CONSTANT_OVERFLOW (value2)
3993 || ! int_fits_type_p (value2, index_type))
3994 return 3;
3996 return add_case_node (value1, value2, label, duplicate);
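/* Hypothetical front-end usage sketch mapping the return codes
   documented above to diagnostics; the message wordings here are
   invented, not the compiler's own.  */
#if 0
  tree duplicate;

  switch (pushcase_range (lo, hi, convert, label, &duplicate))
    {
    case 0: break;					/* success */
    case 1: error ("case label not within a switch"); break;
    case 2: error ("duplicate or overlapping case value"); break;
    case 3: error ("case value out of range"); break;
    case 4: error ("empty case range"); break;
    case 5: error ("case label jumps into scope of cleanup"); break;
    }
#endif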
3999 /* Do the actual insertion of a case label for pushcase and pushcase_range
4000 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4001 slowdown for large switch statements. */
4003 static int
4004 add_case_node (low, high, label, duplicate)
4005 tree low, high;
4006 tree label;
4007 tree *duplicate;
4009 struct case_node *p, **q, *r;
4011 q = &case_stack->data.case_stmt.case_list;
4012 p = *q;
4014 while ((r = *q))
4016 p = r;
4018 /* Keep going past elements distinctly greater than HIGH. */
4019 if (tree_int_cst_lt (high, p->low))
4020 q = &p->left;
4022 /* or distinctly less than LOW. */
4023 else if (tree_int_cst_lt (p->high, low))
4024 q = &p->right;
4026 else
4028 /* We have an overlap; this is an error. */
4029 *duplicate = p->code_label;
4030 return 2;
4034 /* Add this label to the chain, and succeed.
4035 Copy LOW, HIGH so they are on temporary rather than momentary
4036 obstack and will thus survive till the end of the case statement. */
4038 r = (struct case_node *) oballoc (sizeof (struct case_node));
4039 r->low = copy_node (low);
4041 /* If the bounds are equal, turn this into the one-value case. */
4043 if (tree_int_cst_equal (low, high))
4044 r->high = r->low;
4045 else
4047 r->high = copy_node (high);
4048 case_stack->data.case_stmt.num_ranges++;
4051 r->code_label = label;
4052 expand_label (label);
4054 *q = r;
4055 r->parent = p;
4056 r->left = 0;
4057 r->right = 0;
4058 r->balance = 0;
4060 while (p)
4062 struct case_node *s;
4064 if (r == p->left)
4066 int b;
4068 if (! (b = p->balance))
4069 /* Growth propagation from left side. */
4070 p->balance = -1;
4071 else if (b < 0)
4073 if (r->balance < 0)
4075 /* R-Rotation */
4076 if ((p->left = s = r->right))
4077 s->parent = p;
4079 r->right = p;
4080 p->balance = 0;
4081 r->balance = 0;
4082 s = p->parent;
4083 p->parent = r;
4085 if ((r->parent = s))
4087 if (s->left == p)
4088 s->left = r;
4089 else
4090 s->right = r;
4092 else
4093 case_stack->data.case_stmt.case_list = r;
4095 else
4096 /* r->balance == +1 */
4098 /* LR-Rotation */
4100 int b2;
4101 struct case_node *t = r->right;
4103 if ((p->left = s = t->right))
4104 s->parent = p;
4106 t->right = p;
4107 if ((r->right = s = t->left))
4108 s->parent = r;
4110 t->left = r;
4111 b = t->balance;
4112 b2 = b < 0;
4113 p->balance = b2;
4114 b2 = -b2 - b;
4115 r->balance = b2;
4116 t->balance = 0;
4117 s = p->parent;
4118 p->parent = t;
4119 r->parent = t;
4121 if ((t->parent = s))
4123 if (s->left == p)
4124 s->left = t;
4125 else
4126 s->right = t;
4128 else
4129 case_stack->data.case_stmt.case_list = t;
4131 break;
4134 else
4136 /* p->balance == +1; growth of left side balances the node. */
4137 p->balance = 0;
4138 break;
4141 else
4142 /* r == p->right */
4144 int b;
4146 if (! (b = p->balance))
4147 /* Growth propagation from right side. */
4148 p->balance++;
4149 else if (b > 0)
4151 if (r->balance > 0)
4153 /* L-Rotation */
4155 if ((p->right = s = r->left))
4156 s->parent = p;
4158 r->left = p;
4159 p->balance = 0;
4160 r->balance = 0;
4161 s = p->parent;
4162 p->parent = r;
4163 if ((r->parent = s))
4165 if (s->left == p)
4166 s->left = r;
4167 else
4168 s->right = r;
4171 else
4172 case_stack->data.case_stmt.case_list = r;
4175 else
4176 /* r->balance == -1 */
4178 /* RL-Rotation */
4179 int b2;
4180 struct case_node *t = r->left;
4182 if ((p->right = s = t->left))
4183 s->parent = p;
4185 t->left = p;
4187 if ((r->left = s = t->right))
4188 s->parent = r;
4190 t->right = r;
4191 b = t->balance;
4192 b2 = b < 0;
4193 r->balance = b2;
4194 b2 = -b2 - b;
4195 p->balance = b2;
4196 t->balance = 0;
4197 s = p->parent;
4198 p->parent = t;
4199 r->parent = t;
4201 if ((t->parent = s))
4203 if (s->left == p)
4204 s->left = t;
4205 else
4206 s->right = t;
4209 else
4210 case_stack->data.case_stmt.case_list = t;
4212 break;
4214 else
4216 /* p->balance == -1; growth of right side balances the node. */
4217 p->balance = 0;
4218 break;
4222 r = p;
4223 p = p->parent;
4226 return 0;
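/* Standalone sketch: looking a value up in the range tree built
   above, mirroring the insert loop's comparisons.  Types and names
   are hypothetical; tree_int_cst_lt is replaced by plain `<'.  */
#if 0
struct range_node { long low, high; struct range_node *left, *right; };

static struct range_node *
range_lookup (p, v)
     struct range_node *p;
     long v;
{
  while (p)
    {
      if (v < p->low)
	p = p->left;			/* distinctly less than LOW */
      else if (v > p->high)
	p = p->right;			/* distinctly greater than HIGH */
      else
	return p;			/* v lies within [low, high] */
    }
  return 0;
}
#endif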
4230 /* Returns the number of possible values of TYPE.
4231 Returns -1 if the number is unknown or variable.
4232 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4233 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4234 do not increase monotonically (there may be duplicates);
4235 to 1 if the values increase monotonically, but not always by 1;
4236 otherwise sets it to 0. */
4238 HOST_WIDE_INT
4239 all_cases_count (type, sparseness)
4240 tree type;
4241 int *sparseness;
4243 HOST_WIDE_INT count;
4244 *sparseness = 0;
4246 switch (TREE_CODE (type))
4248 tree t;
4249 case BOOLEAN_TYPE:
4250 count = 2;
4251 break;
4252 case CHAR_TYPE:
4253 count = 1 << BITS_PER_UNIT;
4254 break;
4255 default:
4256 case INTEGER_TYPE:
4257 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4258 || TYPE_MAX_VALUE (type) == NULL
4259 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4260 return -1;
4261 else
4263 /* count
4264 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4265 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4266 but with overflow checking. */
4267 tree mint = TYPE_MIN_VALUE (type);
4268 tree maxt = TYPE_MAX_VALUE (type);
4269 HOST_WIDE_INT lo, hi;
4270 neg_double(TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4271 &lo, &hi);
4272 add_double(TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4273 lo, hi, &lo, &hi);
4274 add_double (lo, hi, 1, 0, &lo, &hi);
4275 if (hi != 0 || lo < 0)
4276 return -2;
4277 count = lo;
4279 break;
4280 case ENUMERAL_TYPE:
4281 count = 0;
4282 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4284 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4285 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4286 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4287 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4288 *sparseness = 1;
4289 count++;
4291 if (*sparseness == 1)
4293 tree prev = TREE_VALUE (TYPE_VALUES (type));
4294 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4296 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4298 *sparseness = 2;
4299 break;
4301 prev = TREE_VALUE (t);
4306 return count;
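/* Standalone sketch of the overflow-checked `max - min + 1' above,
   using unsigned host arithmetic instead of neg_double/add_double;
   assumes <limits.h> and minv <= maxv.  */
#if 0
static long
count_range (minv, maxv)
     long minv, maxv;
{
  unsigned long n = (unsigned long) maxv - (unsigned long) minv + 1;

  if (n == 0 || n > (unsigned long) LONG_MAX)
    return -2;				/* does not fit, as above */
  return (long) n;
}
#endif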
4310 #define BITARRAY_TEST(ARRAY, INDEX) \
4311 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4312 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4313 #define BITARRAY_SET(ARRAY, INDEX) \
4314 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4315 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
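/* Usage sketch for the two macros above (values hypothetical): each
   case value seen sets one bit, to be tested later against the
   type's TYPE_VALUES.  */
#if 0
  unsigned char seen[8];		/* 8 * HOST_BITS_PER_CHAR bits */

  bzero ((char *) seen, sizeof seen);
  BITARRAY_SET (seen, 3);
  BITARRAY_SET (seen, 42);
  if (! BITARRAY_TEST (seen, 7))
    warning ("value 7 not handled");	/* wording invented */
#endif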
4317 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4318 with the case values we have seen, assuming the case expression
4319 has the given TYPE.
4320 SPARSENESS is as determined by all_cases_count.
4322 The time needed is proportional to COUNT, unless
4323 SPARSENESS is 2, in which case quadratic time is needed. */
4325 void
4326 mark_seen_cases (type, cases_seen, count, sparseness)
4327 tree type;
4328 unsigned char *cases_seen;
4329 long count;
4330 int sparseness;
4332 tree next_node_to_try = NULL_TREE;
4333 long next_node_offset = 0;
4335 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4336 tree val = make_node (INTEGER_CST);
4337 TREE_TYPE (val) = type;
4338 if (! root)
4339 ; /* Do nothing */
4340 else if (sparseness == 2)
4342 tree t;
4343 HOST_WIDE_INT xlo;
4345 /* This less efficient loop is only needed to handle
4346 duplicate case values (multiple enum constants
4347 with the same value). */
4348 TREE_TYPE (val) = TREE_TYPE (root->low);
4349 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4350 t = TREE_CHAIN (t), xlo++)
4352 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4353 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4354 n = root;
4357 /* Keep going past elements distinctly greater than VAL. */
4358 if (tree_int_cst_lt (val, n->low))
4359 n = n->left;
4361 /* or distinctly less than VAL. */
4362 else if (tree_int_cst_lt (n->high, val))
4363 n = n->right;
4365 else
4367 /* We have found a matching range. */
4368 BITARRAY_SET (cases_seen, xlo);
4369 break;
4372 while (n);
4375 else
4377 if (root->left)
4378 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4379 for (n = root; n; n = n->right)
4381 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4382 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4383 while ( ! tree_int_cst_lt (n->high, val))
4385 /* Calculate (into xlo) the "offset" of the integer (val).
4386 The element with lowest value has offset 0, the next smallest
4387 element has offset 1, etc. */
4389 HOST_WIDE_INT xlo, xhi;
4390 tree t;
4391 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4393 /* The TYPE_VALUES will be in increasing order, so
4394 start searching where we last ended. */
4395 t = next_node_to_try;
4396 xlo = next_node_offset;
4397 xhi = 0;
4398 for (;;)
4400 if (t == NULL_TREE)
4402 t = TYPE_VALUES (type);
4403 xlo = 0;
4405 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4407 next_node_to_try = TREE_CHAIN (t);
4408 next_node_offset = xlo + 1;
4409 break;
4411 xlo++;
4412 t = TREE_CHAIN (t);
4413 if (t == next_node_to_try)
4415 xlo = -1;
4416 break;
4420 else
4422 t = TYPE_MIN_VALUE (type);
4423 if (t)
4424 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4425 &xlo, &xhi);
4426 else
4427 xlo = xhi = 0;
4428 add_double (xlo, xhi,
4429 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4430 &xlo, &xhi);
4433 if (xhi == 0 && xlo >= 0 && xlo < count)
4434 BITARRAY_SET (cases_seen, xlo);
4435 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4436 1, 0,
4437 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4443 /* Called when the index of a switch statement is an enumerated type
4444 and there is no default label.
4446 Checks that all enumeration literals are covered by the case
4447 expressions of a switch. Also, warn if there are any extra
4448 switch cases that are *not* elements of the enumerated type.
4450 If all enumeration literals were covered by the case expressions,
4451 turn one of the expressions into the default expression since it should
4452 not be possible to fall through such a switch. */
4454 void
4455 check_for_full_enumeration_handling (type)
4456 tree type;
4458 register struct case_node *n;
4459 register tree chain;
4460 #if 0 /* variable used by 'if 0'ed code below. */
4461 register struct case_node **l;
4462 int all_values = 1;
4463 #endif
4465 /* True iff the selector type is a numbered set mode. */
4466 int sparseness = 0;
4468 /* The number of possible selector values. */
4469 HOST_WIDE_INT size;
4471 /* For each possible selector value, a one iff it has been matched
4472 by a case value alternative. */
4473 unsigned char *cases_seen;
4475 /* The allocated size of cases_seen, in chars. */
4476 long bytes_needed;
4478 if (! warn_switch)
4479 return;
4481 size = all_cases_count (type, &sparseness);
4482 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
4484 if (size > 0 && size < 600000
4485 /* We deliberately use malloc here - not xmalloc. */
4486 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
4488 long i;
4489 tree v = TYPE_VALUES (type);
4490 bzero (cases_seen, bytes_needed);
4492 /* The time complexity of this code is normally O(N), where
4493 N is the number of members in the enumerated type.
4494 However, if type is an ENUMERAL_TYPE whose values do not
4495 increase monotonically, O(N*log(N)) time may be needed. */
4497 mark_seen_cases (type, cases_seen, size, sparseness);
4499 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4501 if (BITARRAY_TEST(cases_seen, i) == 0)
4502 warning ("enumeration value `%s' not handled in switch",
4503 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
4506 free (cases_seen);
4509 /* Now we go the other way around; we warn if there are case
4510 expressions that don't correspond to enumerators. This can
4511 occur since C and C++ don't enforce type-checking of
4512 assignments to enumeration variables. */
4514 if (case_stack->data.case_stmt.case_list
4515 && case_stack->data.case_stmt.case_list->left)
4516 case_stack->data.case_stmt.case_list
4517 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
4518 if (warn_switch)
4519 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4521 for (chain = TYPE_VALUES (type);
4522 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4523 chain = TREE_CHAIN (chain))
4526 if (!chain)
4528 if (TYPE_NAME (type) == 0)
4529 warning ("case value `%ld' not in enumerated type",
4530 (long) TREE_INT_CST_LOW (n->low));
4531 else
4532 warning ("case value `%ld' not in enumerated type `%s'",
4533 (long) TREE_INT_CST_LOW (n->low),
4534 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4535 == IDENTIFIER_NODE)
4536 ? TYPE_NAME (type)
4537 : DECL_NAME (TYPE_NAME (type))));
4539 if (!tree_int_cst_equal (n->low, n->high))
4541 for (chain = TYPE_VALUES (type);
4542 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4543 chain = TREE_CHAIN (chain))
4546 if (!chain)
4548 if (TYPE_NAME (type) == 0)
4549 warning ("case value `%ld' not in enumerated type",
4550 (long) TREE_INT_CST_LOW (n->high));
4551 else
4552 warning ("case value `%ld' not in enumerated type `%s'",
4553 (long) TREE_INT_CST_LOW (n->high),
4554 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4555 == IDENTIFIER_NODE)
4556 ? TYPE_NAME (type)
4557 : DECL_NAME (TYPE_NAME (type))));
4562 #if 0
4563 /* ??? This optimization is disabled because it causes valid programs to
4564 fail. ANSI C does not guarantee that an expression with enum type
4565 will have a value that is the same as one of the enumeration literals. */
4567 /* If all values were found as case labels, make one of them the default
4568 label. Thus, this switch will never fall through. We arbitrarily pick
4569 the last one to make the default since this is likely the most
4570 efficient choice. */
4572 if (all_values)
4574 for (l = &case_stack->data.case_stmt.case_list;
4575 (*l)->right != 0;
4576 l = &(*l)->right)
4579 case_stack->data.case_stmt.default_label = (*l)->code_label;
4580 *l = 0;
4582 #endif /* 0 */
4586 /* Terminate a case (Pascal) or switch (C) statement
4587 in which ORIG_INDEX is the expression to be tested.
4588 Generate the code to test it and jump to the right place. */
4590 void
4591 expand_end_case (orig_index)
4592 tree orig_index;
4594 tree minval, maxval, range, orig_minval;
4595 rtx default_label = 0;
4596 register struct case_node *n;
4597 unsigned int count;
4598 rtx index;
4599 rtx table_label;
4600 int ncases;
4601 rtx *labelvec;
4602 register int i;
4603 rtx before_case;
4604 register struct nesting *thiscase = case_stack;
4605 tree index_expr, index_type;
4606 int unsignedp;
4608 table_label = gen_label_rtx ();
4609 index_expr = thiscase->data.case_stmt.index_expr;
4610 index_type = TREE_TYPE (index_expr);
4611 unsignedp = TREE_UNSIGNED (index_type);
4613 do_pending_stack_adjust ();
4615 /* This might get a spurious warning in the presence of a syntax error;
4616 it could be fixed by moving the call to check_seenlabel after the
4617 check for error_mark_node, and copying the code of check_seenlabel that
4618 deals with case_stack->data.case_stmt.line_number_status /
4619 restore_line_number_status in front of the call to end_cleanup_deferral.
4620 However, this might miss some useful warnings in the presence of
4621 non-syntax errors. */
4622 check_seenlabel ();
4624 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4625 if (index_type != error_mark_node)
4627 /* If switch expression was an enumerated type, check that all
4628 enumeration literals are covered by the cases.
4629 No sense trying this if there's a default case, however. */
4631 if (!thiscase->data.case_stmt.default_label
4632 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4633 && TREE_CODE (index_expr) != INTEGER_CST)
4634 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4636 /* If we don't have a default-label, create one here,
4637 after the body of the switch. */
4638 if (thiscase->data.case_stmt.default_label == 0)
4640 thiscase->data.case_stmt.default_label
4641 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4642 expand_label (thiscase->data.case_stmt.default_label);
4644 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4646 before_case = get_last_insn ();
4648 if (thiscase->data.case_stmt.case_list
4649 && thiscase->data.case_stmt.case_list->left)
4650 thiscase->data.case_stmt.case_list
4651 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
4653 /* Simplify the case-list before we count it. */
4654 group_case_nodes (thiscase->data.case_stmt.case_list);
4656 /* Get upper and lower bounds of case values.
4657 Also convert all the case values to the index expr's data type. */
4659 count = 0;
4660 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4662 /* Check low and high label values are integers. */
4663 if (TREE_CODE (n->low) != INTEGER_CST)
4664 abort ();
4665 if (TREE_CODE (n->high) != INTEGER_CST)
4666 abort ();
4668 n->low = convert (index_type, n->low);
4669 n->high = convert (index_type, n->high);
4671 /* Count the elements and track the largest and smallest
4672 of them (treating them as signed even if they are not). */
4673 if (count++ == 0)
4675 minval = n->low;
4676 maxval = n->high;
4678 else
4680 if (INT_CST_LT (n->low, minval))
4681 minval = n->low;
4682 if (INT_CST_LT (maxval, n->high))
4683 maxval = n->high;
4685 /* A range counts double, since it requires two compares. */
4686 if (! tree_int_cst_equal (n->low, n->high))
4687 count++;
4690 orig_minval = minval;
4692 /* Compute span of values. */
4693 if (count != 0)
4694 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
4696 end_cleanup_deferral ();
4698 if (count == 0)
4700 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
4701 emit_queue ();
4702 emit_jump (default_label);
4705 /* If range of values is much bigger than number of values,
4706 make a sequence of conditional branches instead of a dispatch.
4707 If the switch-index is a constant, do it this way
4708 because we can optimize it. */
4710 #ifndef CASE_VALUES_THRESHOLD
4711 #ifdef HAVE_casesi
4712 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
4713 #else
4714 /* If machine does not have a case insn that compares the
4715 bounds, this means extra overhead for dispatch tables
4716 which raises the threshold for using them. */
4717 #define CASE_VALUES_THRESHOLD 5
4718 #endif /* HAVE_casesi */
4719 #endif /* CASE_VALUES_THRESHOLD */
4721 else if (TREE_INT_CST_HIGH (range) != 0
4722 || count < CASE_VALUES_THRESHOLD
4723 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
4724 > 10 * count)
4725 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
4726 || flag_pic
4727 #endif
4728 || TREE_CODE (index_expr) == INTEGER_CST
4729 /* These will reduce to a constant. */
4730 || (TREE_CODE (index_expr) == CALL_EXPR
4731 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
4732 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
4733 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
4734 || (TREE_CODE (index_expr) == COMPOUND_EXPR
4735 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
4737 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4739 /* If the index is a short or char that we do not have
4740 an insn to handle comparisons directly, convert it to
4741 a full integer now, rather than letting each comparison
4742 generate the conversion. */
4744 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
4745 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
4746 == CODE_FOR_nothing))
4748 enum machine_mode wider_mode;
4749 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
4750 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4751 if (cmp_optab->handlers[(int) wider_mode].insn_code
4752 != CODE_FOR_nothing)
4754 index = convert_to_mode (wider_mode, index, unsignedp);
4755 break;
4759 emit_queue ();
4760 do_pending_stack_adjust ();
4762 index = protect_from_queue (index, 0);
4763 if (GET_CODE (index) == MEM)
4764 index = copy_to_reg (index);
4765 if (GET_CODE (index) == CONST_INT
4766 || TREE_CODE (index_expr) == INTEGER_CST)
4768 /* Make a tree node with the proper constant value
4769 if we don't already have one. */
4770 if (TREE_CODE (index_expr) != INTEGER_CST)
4772 index_expr
4773 = build_int_2 (INTVAL (index),
4774 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
4775 index_expr = convert (index_type, index_expr);
4778 /* For constant index expressions we need only
4779 issue an unconditional branch to the appropriate
4780 target code. The job of removing any unreachable
4781 code is left to the optimization phase if the
4782 "-O" option is specified. */
4783 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4784 if (! tree_int_cst_lt (index_expr, n->low)
4785 && ! tree_int_cst_lt (n->high, index_expr))
4786 break;
4788 if (n)
4789 emit_jump (label_rtx (n->code_label));
4790 else
4791 emit_jump (default_label);
4793 else
4795 /* If the index expression is not constant we generate
4796 a binary decision tree to select the appropriate
4797 target code. This is done as follows:
4799 The list of cases is rearranged into a binary tree,
4800 nearly optimal assuming equal probability for each case.
4802 The tree is transformed into RTL, eliminating
4803 redundant test conditions at the same time.
4805 If program flow could reach the end of the
4806 decision tree an unconditional jump to the
4807 default code is emitted. */
4809 use_cost_table
4810 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
4811 && estimate_case_costs (thiscase->data.case_stmt.case_list));
4812 balance_case_nodes (&thiscase->data.case_stmt.case_list,
4813 NULL_PTR);
4814 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
4815 default_label, index_type);
4816 emit_jump_if_reachable (default_label);
4819 else
4821 int win = 0;
4822 #ifdef HAVE_casesi
4823 if (HAVE_casesi)
4825 enum machine_mode index_mode = SImode;
4826 int index_bits = GET_MODE_BITSIZE (index_mode);
4827 rtx op1, op2;
4828 enum machine_mode op_mode;
4830 /* Convert the index to SImode. */
4831 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
4832 > GET_MODE_BITSIZE (index_mode))
4834 enum machine_mode omode = TYPE_MODE (index_type);
4835 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
4837 /* We must handle the endpoints in the original mode. */
4838 index_expr = build (MINUS_EXPR, index_type,
4839 index_expr, minval);
4840 minval = integer_zero_node;
4841 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4842 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
4843 emit_jump_insn (gen_bltu (default_label));
4844 /* Now we can safely truncate. */
4845 index = convert_to_mode (index_mode, index, 0);
4847 else
4849 if (TYPE_MODE (index_type) != index_mode)
4851 index_expr = convert (type_for_size (index_bits, 0),
4852 index_expr);
4853 index_type = TREE_TYPE (index_expr);
4856 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4858 emit_queue ();
4859 index = protect_from_queue (index, 0);
4860 do_pending_stack_adjust ();
4862 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
4863 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
4864 (index, op_mode))
4865 index = copy_to_mode_reg (op_mode, index);
4867 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
4869 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
4870 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
4871 (op1, op_mode))
4872 op1 = copy_to_mode_reg (op_mode, op1);
4874 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
4876 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
4877 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
4878 (op2, op_mode))
4879 op2 = copy_to_mode_reg (op_mode, op2);
4881 emit_jump_insn (gen_casesi (index, op1, op2,
4882 table_label, default_label));
4883 win = 1;
4885 #endif
4886 #ifdef HAVE_tablejump
4887 if (! win && HAVE_tablejump)
4889 index_expr = convert (thiscase->data.case_stmt.nominal_type,
4890 fold (build (MINUS_EXPR, index_type,
4891 index_expr, minval)));
4892 index_type = TREE_TYPE (index_expr);
4893 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4894 emit_queue ();
4895 index = protect_from_queue (index, 0);
4896 do_pending_stack_adjust ();
4898 do_tablejump (index, TYPE_MODE (index_type),
4899 expand_expr (range, NULL_RTX, VOIDmode, 0),
4900 table_label, default_label);
4901 win = 1;
4903 #endif
4904 if (! win)
4905 abort ();
4907 /* Get table of labels to jump to, in order of case index. */
4909 ncases = TREE_INT_CST_LOW (range) + 1;
4910 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
4911 bzero ((char *) labelvec, ncases * sizeof (rtx));
4913 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4915 register HOST_WIDE_INT i
4916 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
4918 while (1)
4920 labelvec[i]
4921 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
4922 if (i + TREE_INT_CST_LOW (orig_minval)
4923 == TREE_INT_CST_LOW (n->high))
4924 break;
4925 i++;
4929 /* Fill in the gaps with the default. */
4930 for (i = 0; i < ncases; i++)
4931 if (labelvec[i] == 0)
4932 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
4934 /* Output the table */
4935 emit_label (table_label);
4937 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
4938 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
4939 gen_rtx_LABEL_REF (Pmode, table_label),
4940 gen_rtvec_v (ncases, labelvec),
4941 const0_rtx, const0_rtx, 0));
4942 else
4943 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
4944 gen_rtvec_v (ncases, labelvec)));
4946 /* If the case insn drops through the table,
4947 after the table we must jump to the default-label.
4948 Otherwise record no drop-through after the table. */
4949 #ifdef CASE_DROPS_THROUGH
4950 emit_jump (default_label);
4951 #else
4952 emit_barrier ();
4953 #endif
4956 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
4957 reorder_insns (before_case, get_last_insn (),
4958 thiscase->data.case_stmt.start);
4960 else
4961 end_cleanup_deferral ();
4963 if (thiscase->exit_label)
4964 emit_label (thiscase->exit_label);
4966 POPSTACK (case_stack);
4968 free_temp_slots ();
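/* Illustrative sketch, in plain C, of the two dispatch shapes chosen
   above.  Dense cases become a range check plus a table (the casesi /
   tablejump path); sparse cases become a balanced comparison tree.
   All values here are hypothetical.  */
#if 0
static int
dispatch_dense (idx)
     unsigned int idx;
{
  static const int table[4] = { 10, 11, 12, 13 };

  if (idx > 3)				/* the casesi-style bounds check */
    return -1;				/* default label */
  return table[idx];			/* one indirect jump */
}

static int
dispatch_sparse (idx)
     int idx;
{
  if (idx == 40)			/* root of the balanced tree */
    return 11;
  if (idx < 40)
    return idx == 2 ? 10 : -1;
  return idx == 900 ? 12 : -1;
}
#endif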
4971 /* Convert the tree NODE into a list linked by the right field, with the left
4972 field zeroed. RIGHT is used for recursion; it is a list to be placed
4973 rightmost in the resulting list. */
4975 static struct case_node *
4976 case_tree2list (node, right)
4977 struct case_node *node, *right;
4979 struct case_node *left;
4981 if (node->right)
4982 right = case_tree2list (node->right, right);
4984 node->right = right;
4985 if ((left = node->left))
4987 node->left = 0;
4988 return case_tree2list (left, node);
4991 return node;
4994 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
4996 static void
4997 do_jump_if_equal (op1, op2, label, unsignedp)
4998 rtx op1, op2, label;
4999 int unsignedp;
5001 if (GET_CODE (op1) == CONST_INT
5002 && GET_CODE (op2) == CONST_INT)
5004 if (INTVAL (op1) == INTVAL (op2))
5005 emit_jump (label);
5007 else
5009 enum machine_mode mode = GET_MODE (op1);
5010 if (mode == VOIDmode)
5011 mode = GET_MODE (op2);
5012 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
5013 emit_jump_insn (gen_beq (label));
5017 /* Not all case values are encountered equally. This function
5018 uses a heuristic to weight case labels, in cases where that
5019 looks like a reasonable thing to do.
5021 Right now, all we try to guess is text, and we establish the
5022 following weights:
5024 chars above space: 16
5025 digits: 16
5026 default: 12
5027 space, punct: 8
5028 tab: 4
5029 newline: 2
5030 other "\" chars: 1
5031 remaining chars: 0
5033 If we find any cases in the switch that are not either -1 or in the range
5034 of valid ASCII characters, or are control characters other than those
5035 commonly used with "\", don't treat this switch as scanning text.
5037 Return 1 if these nodes are suitable for cost estimation, otherwise
5038 return 0. */
5040 static int
5041 estimate_case_costs (node)
5042 case_node_ptr node;
5044 tree min_ascii = build_int_2 (-1, -1);
5045 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5046 case_node_ptr n;
5047 int i;
5049 /* If we haven't already made the cost table, make it now. Note that the
5050 lower bound of the table is -1, not zero. */
5052 if (cost_table == NULL)
5054 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5055 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5057 for (i = 0; i < 128; i++)
5059 if (ISALNUM (i))
5060 cost_table[i] = 16;
5061 else if (ISPUNCT (i))
5062 cost_table[i] = 8;
5063 else if (ISCNTRL (i))
5064 cost_table[i] = -1;
5067 cost_table[' '] = 8;
5068 cost_table['\t'] = 4;
5069 cost_table['\0'] = 4;
5070 cost_table['\n'] = 2;
5071 cost_table['\f'] = 1;
5072 cost_table['\v'] = 1;
5073 cost_table['\b'] = 1;
5076 /* See if all the case expressions look like text. It is text if the
5077 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5078 as signed arithmetic since we don't want to ever access cost_table with a
5079 value less than -1. Also check that none of the constants in a range
5080 are strange control characters. */
5082 for (n = node; n; n = n->right)
5084 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5085 return 0;
5087 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5088 if (cost_table[i] < 0)
5089 return 0;
5092 /* All interesting values are within the range of interesting
5093 ASCII characters. */
5094 return 1;
5097 /* Scan an ordered list of case nodes
5098 combining those with consecutive values or ranges.
5100 E.g., three separate entries 1: 2: 3: become one entry 1..3: */
5102 static void
5103 group_case_nodes (head)
5104 case_node_ptr head;
5106 case_node_ptr node = head;
5108 while (node)
5110 rtx lb = next_real_insn (label_rtx (node->code_label));
5111 rtx lb2;
5112 case_node_ptr np = node;
5114 /* Try to group the successors of NODE with NODE. */
5115 while (((np = np->right) != 0)
5116 /* Do they jump to the same place? */
5117 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5118 || (lb != 0 && lb2 != 0
5119 && simplejump_p (lb)
5120 && simplejump_p (lb2)
5121 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5122 SET_SRC (PATTERN (lb2)))))
5123 /* Are their ranges consecutive? */
5124 && tree_int_cst_equal (np->low,
5125 fold (build (PLUS_EXPR,
5126 TREE_TYPE (node->high),
5127 node->high,
5128 integer_one_node)))
5129 /* An overflow is not consecutive. */
5130 && tree_int_cst_lt (node->high,
5131 fold (build (PLUS_EXPR,
5132 TREE_TYPE (node->high),
5133 node->high,
5134 integer_one_node))))
5136 node->high = np->high;
5138 /* NP is the first node after NODE which can't be grouped with it.
5139 Delete the nodes in between, and move on to that node. */
5140 node->right = np;
5141 node = np;
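/* Standalone sketch of the grouping above: collapse adjacent entries
   whose targets match and whose ranges are consecutive, keeping the
   overflow guard.  Names and types are hypothetical.  */
#if 0
struct crange { long low, high; int target; struct crange *next; };

static void
group_ranges (n)
     struct crange *n;
{
  while (n)
    {
      struct crange *p = n->next;

      while (p && p->target == n->target
	     && n->high + 1 == p->low	/* consecutive ranges */
	     && n->high + 1 > n->high)	/* and no wraparound */
	{
	  n->high = p->high;		/* absorb the successor */
	  p = p->next;
	}
      n->next = p;			/* drop the absorbed nodes */
      n = p;
    }
}
#endif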
5145 /* Take an ordered list of case nodes
5146 and transform them into a near optimal binary tree,
5147 on the assumption that any target code selection value is as
5148 likely as any other.
5150 The transformation is performed by splitting the ordered
5151 list into two equal sections plus a pivot. The parts are
5152 then attached to the pivot as left and right branches. Each
5153 branch is then transformed recursively. */
5155 static void
5156 balance_case_nodes (head, parent)
5157 case_node_ptr *head;
5158 case_node_ptr parent;
5160 register case_node_ptr np;
5162 np = *head;
5163 if (np)
5165 int cost = 0;
5166 int i = 0;
5167 int ranges = 0;
5168 register case_node_ptr *npp;
5169 case_node_ptr left;
5171 /* Count the number of entries on branch. Also count the ranges. */
5173 while (np)
5175 if (!tree_int_cst_equal (np->low, np->high))
5177 ranges++;
5178 if (use_cost_table)
5179 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5182 if (use_cost_table)
5183 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5185 i++;
5186 np = np->right;
5189 if (i > 2)
5191 /* Split this list if it is long enough for that to help. */
5192 npp = head;
5193 left = *npp;
5194 if (use_cost_table)
5196 /* Find the place in the list that bisects the list's total cost;
5197 here I gets half the total cost. */
5198 int n_moved = 0;
5199 i = (cost + 1) / 2;
5200 while (1)
5202 /* Skip nodes while their cost does not reach that amount. */
5203 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5204 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5205 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5206 if (i <= 0)
5207 break;
5208 npp = &(*npp)->right;
5209 n_moved += 1;
5211 if (n_moved == 0)
5213 /* Leave this branch lopsided, but optimize left-hand
5214 side and fill in `parent' fields for right-hand side. */
5215 np = *head;
5216 np->parent = parent;
5217 balance_case_nodes (&np->left, np);
5218 for (; np->right; np = np->right)
5219 np->right->parent = np;
5220 return;
5223 /* If there are just three nodes, split at the middle one. */
5224 else if (i == 3)
5225 npp = &(*npp)->right;
5226 else
5228 /* Find the place in the list that bisects the list's total cost,
5229 where ranges count as 2.
5230 Here I gets half the total cost. */
5231 i = (i + ranges + 1) / 2;
5232 while (1)
5234 /* Skip nodes while their cost does not reach that amount. */
5235 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5236 i--;
5237 i--;
5238 if (i <= 0)
5239 break;
5240 npp = &(*npp)->right;
5243 *head = np = *npp;
5244 *npp = 0;
5245 np->parent = parent;
5246 np->left = left;
5248 /* Optimize each of the two split parts. */
5249 balance_case_nodes (&np->left, np);
5250 balance_case_nodes (&np->right, np);
5252 else
5254 /* Else leave this branch as one level,
5255 but fill in `parent' fields. */
5256 np = *head;
5257 np->parent = parent;
5258 for (; np->right; np = np->right)
5259 np->right->parent = np;
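/* Standalone sketch of the unweighted split above: turn a sorted,
   next-linked list into a balanced tree by recursing on the halves
   around the middle node.  Names are hypothetical, and the `parent'
   links are omitted.  */
#if 0
struct bnode { int key; struct bnode *left, *right, *next; };

static struct bnode *
list_to_tree (head, n)
     struct bnode *head;
     int n;
{
  struct bnode *mid = head;
  int i;

  if (n == 0)
    return 0;
  for (i = 0; i < n / 2; i++)		/* find the pivot */
    mid = mid->next;
  mid->left = list_to_tree (head, n / 2);
  mid->right = list_to_tree (mid->next, n - n / 2 - 1);
  return mid;
}
#endif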
5264 /* Search the parent sections of the case node tree
5265 to see if a test for the lower bound of NODE would be redundant.
5266 INDEX_TYPE is the type of the index expression.
5268 The instructions to generate the case decision tree are
5269 output in the same order as nodes are processed so it is
5270 known that if a parent node checks the range of the current
5271 node minus one that the current node is bounded at its lower
5272 span. Thus the test would be redundant. */
5274 static int
5275 node_has_low_bound (node, index_type)
5276 case_node_ptr node;
5277 tree index_type;
5279 tree low_minus_one;
5280 case_node_ptr pnode;
5282 /* If the lower bound of this node is the lowest value in the index type,
5283 we need not test it. */
5285 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5286 return 1;
5288 /* If this node has a left branch, the value at the left must be less
5289 than that at this node, so it cannot be bounded at the bottom and
5290 we need not bother testing any further. */
5292 if (node->left)
5293 return 0;
5295 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5296 node->low, integer_one_node));
5298 /* If the subtraction above overflowed, we can't verify anything.
5299 Otherwise, look for a parent that tests our value - 1. */
5301 if (! tree_int_cst_lt (low_minus_one, node->low))
5302 return 0;
5304 for (pnode = node->parent; pnode; pnode = pnode->parent)
5305 if (tree_int_cst_equal (low_minus_one, pnode->high))
5306 return 1;
5308 return 0;
5311 /* Search the parent sections of the case node tree
5312 to see if a test for the upper bound of NODE would be redundant.
5313 INDEX_TYPE is the type of the index expression.
5315 The instructions to generate the case decision tree are
5316 output in the same order as nodes are processed so it is
5317 known that if a parent node checks the range of the current
5318 node plus one that the current node is bounded at its upper
5319 span. Thus the test would be redundant. */
5321 static int
5322 node_has_high_bound (node, index_type)
5323 case_node_ptr node;
5324 tree index_type;
5326 tree high_plus_one;
5327 case_node_ptr pnode;
5329 /* If there is no upper bound, obviously no test is needed. */
5331 if (TYPE_MAX_VALUE (index_type) == NULL)
5332 return 1;
5334 /* If the upper bound of this node is the highest value in the type
5335 of the index expression, we need not test against it. */
5337 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5338 return 1;
5340 /* If this node has a right branch, the value at the right must be greater
5341 than that at this node, so it cannot be bounded at the top and
5342 we need not bother testing any further. */
5344 if (node->right)
5345 return 0;
5347 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5348 node->high, integer_one_node));
5350 /* If the addition above overflowed, we can't verify anything.
5351 Otherwise, look for a parent that tests our value + 1. */
5353 if (! tree_int_cst_lt (node->high, high_plus_one))
5354 return 0;
5356 for (pnode = node->parent; pnode; pnode = pnode->parent)
5357 if (tree_int_cst_equal (high_plus_one, pnode->low))
5358 return 1;
5360 return 0;
5363 /* Search the parent sections of the
5364 case node tree to see if both tests for the upper and lower
5365 bounds of NODE would be redundant. */
5367 static int
5368 node_is_bounded (node, index_type)
5369 case_node_ptr node;
5370 tree index_type;
5372 return (node_has_low_bound (node, index_type)
5373 && node_has_high_bound (node, index_type));
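/* Worked example of the pruning (values hypothetical): suppose a
   parent already branched away on `index > 50' before control reaches
   a node for the single value 51.  node_has_low_bound finds a parent
   whose high is 50 == 51 - 1, so the lower-bound test is redundant.
   If some parent's range also begins at 52 == 51 + 1, the upper bound
   is covered too; node_is_bounded holds and emit_case_nodes emits
   only an unconditional jump.  */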
5376 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5378 static void
5379 emit_jump_if_reachable (label)
5380 rtx label;
5382 if (GET_CODE (get_last_insn ()) != BARRIER)
5383 emit_jump (label);
5386 /* Emit step-by-step code to select a case for the value of INDEX.
5387 The thus generated decision tree follows the form of the
5388 case-node binary tree NODE, whose nodes represent test conditions.
5389 INDEX_TYPE is the type of the index of the switch.
5391 Care is taken to prune redundant tests from the decision tree
5392 by detecting any boundary conditions already checked by
5393 emitted rtx. (See node_has_high_bound, node_has_low_bound
5394 and node_is_bounded, above.)
5396 Where the test conditions can be shown to be redundant we emit
5397 an unconditional jump to the target code. As a further
5398 optimization, the subordinates of a tree node are examined to
5399 check for bounded nodes. In this case conditional and/or
5400 unconditional jumps as a result of the boundary check for the
5401 current node are arranged to target the subordinates' associated
5402 code for out-of-bound conditions on the current node.
5404 We can assume that when control reaches the code generated here,
5405 the index value has already been compared with the parents
5406 of this node, and determined to be on the same side of each parent
5407 as this node is. Thus, if this node tests for the value 51,
5408 and a parent tested for 52, we don't need to consider
5409 the possibility of a value greater than 51. If another parent
5410 tests for the value 50, then this node need not test anything. */
5412 static void
5413 emit_case_nodes (index, node, default_label, index_type)
5414 rtx index;
5415 case_node_ptr node;
5416 rtx default_label;
5417 tree index_type;
5419 /* If INDEX has an unsigned type, we must make unsigned branches. */
5420 int unsignedp = TREE_UNSIGNED (index_type);
5421 typedef rtx rtx_function ();
5422 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
5423 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
5424 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
5425 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
5426 enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
         this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                        label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
        {
          /* This node has children on both sides.
             Dispatch to one side or the other
             by comparing the index value with this node's value.
             If one subtree is bounded, check that one first,
             so we can avoid real branches in the tree.  */

          if (node_is_bounded (node->right, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);

              emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
              emit_case_nodes (index, node->left, default_label, index_type);
            }

          else if (node_is_bounded (node->left, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
              emit_case_nodes (index, node->right, default_label, index_type);
            }

          else
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              tree test_label
                = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

              /* See if the value is on the right.  */
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));

              /* Value must be on the left.
                 Handle the left-hand subtree.  */
              emit_case_nodes (index, node->left, default_label, index_type);
              /* If left-hand subtree does nothing,
                 go to default.  */
              emit_jump_if_reachable (default_label);

              /* Code branches here for the right-hand subtree.  */
              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }
      else if (node->right != 0 && node->left == 0)
        {
          /* Here we have a right child but no left, so we issue a
             conditional branch to default and process the right child.

             Omit the conditional branch to default if the right child
             amounts to only a single value; it costs too much space to
             save so little time.  */

          if (node->right->right || node->right->left
              || !tree_int_cst_equal (node->right->low, node->right->high))
            {
              if (!node_has_low_bound (node, index_type))
                {
                  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                 LT, NULL_RTX, mode, unsignedp, 0);
                  emit_jump_insn ((*gen_blt_pat) (default_label));
                }

              emit_case_nodes (index, node->right, default_label, index_type);
            }
          else
            /* We cannot process node->right normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->right explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->right->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->right->code_label), unsignedp);
        }
      else if (node->right == 0 && node->left != 0)
        {
          /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
         of the condition here, but they didn't work
         and I don't understand what the idea was.  -- rms.  */
          /* If our "most probable entry" is less probable
             than the default label, emit a jump to
             the default label using condition codes
             already lying around.  With no right branch,
             a branch-greater-than will get us to the default
             label correctly.  */
          if (use_cost_table
              && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
            ;
#endif /* 0 */
          if (node->left->left || node->left->right
              || !tree_int_cst_equal (node->left->low, node->left->high))
            {
              if (!node_has_high_bound (node, index_type))
                {
                  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                 GT, NULL_RTX, mode, unsignedp, 0);
                  emit_jump_insn ((*gen_bgt_pat) (default_label));
                }

              emit_case_nodes (index, node->left, default_label, index_type);
            }
          else
            /* We cannot process node->left normally
               since we haven't ruled out the numbers greater than
               this node's value.  So handle node->left explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->left->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->left->code_label), unsignedp);
        }
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
         value, except that we do not start by testing whether this node
         is the one to branch to.  */
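
      /* An editorial example, not from the original sources: with the GNU
         case-range extension, a label such as `case 50 ... 52:' produces a
         node whose low bound is 50 and whose high bound is 52, so no single
         equality test can select it; the code below instead brackets INDEX
         against both bounds (or against just one bound when the other is
         already implied by a parent's test).  */
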
      if (node->right != 0 && node->left != 0)
        {
          /* Node has subtrees on both sides.
             If the right-hand subtree is bounded,
             test for it first, since we can go straight there.
             Otherwise, we need to make a branch in the control structure,
             then handle the two subtrees.  */
          tree test_label = 0;

          emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                             VOIDmode, 0),
                         GT, NULL_RTX, mode, unsignedp, 0);

          if (node_is_bounded (node->right, index_type))
            /* Right hand node is fully bounded so we can eliminate any
               testing and branch directly to the target code.  */
            emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
          else
            {
              /* Right hand node requires testing.
                 Branch to a label where we will handle it later.  */

              test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
              emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                         GE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

          /* Handle the left-hand subtree.  */
          emit_case_nodes (index, node->left, default_label, index_type);

          /* If right node had to be handled later, do that now.  */

          if (test_label)
            {
              /* If the left-hand subtree fell through,
                 don't let it fall into the right-hand subtree.  */
              emit_jump_if_reachable (default_label);

              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }
      else if (node->right != 0 && node->left == 0)
        {
          /* Deal with values to the left of this node,
             if they are possible.  */
          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (default_label));
            }

          /* Value belongs to this node or to the right-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                             VOIDmode, 0),
                         LE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));

          emit_case_nodes (index, node->right, default_label, index_type);
        }
      else if (node->right == 0 && node->left != 0)
        {
          /* Deal with values to the right of this node,
             if they are possible.  */
          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (default_label));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                         GE, NULL_RTX, mode, unsignedp, 0);
          emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

          emit_case_nodes (index, node->left, default_label, index_type);
        }
      else
        {
          /* Node has no children so we check low and high bounds to remove
             redundant tests.  Only one of the bounds can exist,
             since otherwise this node is bounded--a case tested already.  */

          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                             GT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_bgt_pat) (default_label));
            }

          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                             LT, NULL_RTX, mode, unsignedp, 0);
              emit_jump_insn ((*gen_blt_pat) (default_label));
            }

          emit_jump (label_rtx (node->code_label));
        }
    }
}
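
/* The following is an editorial sketch, not part of the compiler proper:
   it shows, as ordinary C, the shape of the branch sequence emit_case_nodes
   produces for a small single-valued tree (root 51, left leaf 50, right
   leaf 52, where neither leaf is fully bounded, so the "neither node is
   bounded" path above applies).  The labels and return values here are
   hypothetical.  */

#if 0
static int
emit_case_nodes_example (index)
     int index;
{
  if (index == 51)              /* do_jump_if_equal on the root */
    goto L51;
  if (index > 51)               /* distinguish the two sides */
    goto test;
  if (index == 50)              /* left subtree: a single-valued leaf */
    goto L50;
  goto dflt;                    /* left side fell through; go to default */
 test:
  if (index == 52)              /* right subtree: a single-valued leaf */
    goto L52;
 dflt:                          /* right side also falls through to default */
  return -1;
 L50:
  return 0;
 L51:
  return 1;
 L52:
  return 2;
}
#endif
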
/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.

   Allocated by the call to identify_blocks, then released after the call
   to reorder_blocks in the function unroll_block_trees.  */

static tree *block_vector;

void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());

  /* Release any memory allocated by identify_blocks.  */
  if (block_vector)
    free (block_vector);
}
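
/* An editorial sketch of the expected calling sequence; the real caller
   lives in the loop unrolling code and is only assumed here.  The point
   is the ordering: the BLOCK tree must be recorded before any insns are
   copied, and rebuilt afterwards.  */

#if 0
static void
unroll_block_trees_example ()
{
  /* Record the BLOCK tree; fills block_vector via identify_blocks.  */
  find_loop_tree_blocks ();

  /* ... unroll loops here, duplicating insns together with their
     NOTE_INSN_BLOCK_BEG / NOTE_INSN_BLOCK_END notes ...  */

  /* Rebuild the BLOCK tree to match the duplicated notes, then
     release block_vector.  */
  unroll_block_trees ();
}
#endif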