gcc/stmt.c
/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
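
/* Illustrative sketch (not part of this file): the call sequence a front
   end might make to expand `if (cond) then_stmt;'.  The helper name is
   invented; the expand_* signatures are assumptions based on this era of
   GCC, so treat this as an outline rather than working front-end code.  */
#if 0
static void
expand_simple_if (tree cond, tree then_stmt)
{
  expand_start_cond (cond, 0);	/* emit the test and a conditional jump */
  expand_expr_stmt (then_stmt);	/* emit RTL for the then-clause */
  expand_end_cond ();		/* define the label the jump targets */
}
#endif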
#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
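
/* Illustrative sketch (not part of this file): what the two lowering
   strategies amount to at the source level.  All names here are invented
   for illustration.  */
#if 0
/* Compare-and-jump tree form, for sparse case values 1, 4, 9.  */
int
dispatch_sparse (int x)
{
  if (x < 4)
    return x == 1 ? 10 : -1;	/* left subtree */
  else if (x > 4)
    return x == 9 ? 30 : -1;	/* right subtree */
  else
    return 20;			/* the root, x == 4 */
}

/* Branch-table form, for dense case values 0..3.  */
int
dispatch_dense (int x)
{
  static const int table[4] = { 10, 20, 30, 40 };
  if ((unsigned) x < 4)		/* single range check */
    return table[x];
  return -1;			/* default */
}
#endif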
struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
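
/* Illustrative sketch (an assumption; argument lists abbreviated from
   memory of this era's interfaces): how a front end's calls nest, and
   hence how the stacks below grow and shrink.  For
   `while (c) { int v[n]; if (p) break; }' the sequence is roughly:

	expand_start_loop (1);		pushes loop_stack
	  expand_start_bindings (0);	pushes block_stack
	    ...declarations, body...
	    expand_exit_loop (0);	uses loop_stack's exit_label
	  expand_end_bindings (...);	pops block_stack, runs cleanups
	expand_end_loop ();		pops loop_stack

   The `all' chain threads every entry of every one of these stacks
   together in nesting order.  */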
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int n_function_calls;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block, if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	  /* When in a conditional context, this is the specific
	     cleanup list associated with last_unconditional_cleanup,
	     where we place the conditionalized cleanups.  */
	  tree *cleanup_ptr;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} case_stmt;
    } data;
};
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
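
/* Usage sketch (an assumption, based on the expand_end_* functions in
   the rest of this file): each end-function pops its own construct,
   e.g.

	POPSTACK (loop_stack);

   and because `this' walks the `all' chain, any conds, blocks or cases
   still open inside that loop are popped off their stacks as well.  */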
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
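
/* Illustrative sketch (not part of this file): C source that forces a
   fixup.  At the `goto' the expander cannot yet know that `out' lies
   outside the contour whose stack level must be restored for v[].  */
#if 0
void
f (int n, int p)
{
  {
    int v[n];			/* variable-size: block saves a stack level */
    if (p)
      goto out;			/* forward goto; fixup recorded here */
    v[0] = 0;
  }				/* exiting the contour checks the fixup */
 out:
  return;
}
#endif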
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
struct stmt_status
{
  /* Chain of all pending binding contours.  */
  struct nesting *x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting *x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting *x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting *x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting *x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting *x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Filename and line number of last line-number note,
     whether we actually emitted it or not.  */
  char *x_emit_filename;
  int x_emit_lineno;

  struct goto_fixup *x_goto_fixup_chain;
};
#define block_stack (current_function->stmt->x_block_stack)
#define stack_block_stack (current_function->stmt->x_stack_block_stack)
#define cond_stack (current_function->stmt->x_cond_stack)
#define loop_stack (current_function->stmt->x_loop_stack)
#define case_stack (current_function->stmt->x_case_stack)
#define nesting_stack (current_function->stmt->x_nesting_stack)
#define nesting_depth (current_function->stmt->x_nesting_depth)
#define current_block_start_count (current_function->stmt->x_block_start_count)
#define last_expr_type (current_function->stmt->x_last_expr_type)
#define last_expr_value (current_function->stmt->x_last_expr_value)
#define expr_stmts_for_value (current_function->stmt->x_expr_stmts_for_value)
#define emit_filename (current_function->stmt->x_emit_filename)
#define emit_lineno (current_function->stmt->x_emit_lineno)
#define goto_fixup_chain (current_function->stmt->x_goto_fixup_chain)

/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;
static int n_occurrences		PROTO((int, const char *));
static void expand_goto_internal	PROTO((tree, rtx, rtx));
static int expand_fixup			PROTO((tree, rtx, rtx));
static rtx expand_nl_handler_label	PROTO((rtx, rtx));
static void expand_nl_goto_receiver	PROTO((void));
static void expand_nl_goto_receivers	PROTO((struct nesting *));
static void fixup_gotos			PROTO((struct nesting *, rtx, tree,
					       rtx, int));
static void expand_null_return_1	PROTO((rtx, int));
static void expand_value_return		PROTO((rtx));
static int tail_recursion_args		PROTO((tree, tree));
static void expand_cleanups		PROTO((tree, tree, int, int));
static void check_seenlabel		PROTO((void));
static void do_jump_if_equal		PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs		PROTO((case_node_ptr));
static void group_case_nodes		PROTO((case_node_ptr));
static void balance_case_nodes		PROTO((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PROTO((case_node_ptr, tree));
static int node_has_high_bound		PROTO((case_node_ptr, tree));
static int node_is_bounded		PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable	PROTO((rtx));
static void emit_case_nodes		PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node		PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list PROTO((case_node *, case_node *));
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
  init_eh ();
}
void
init_stmt_for_function ()
{
  current_function->stmt
    = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));

  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  current_block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;

  init_eh_for_function ();
}

/* Return nonzero if anything is pushed on the loop, condition, or case
   stack.  */
int
in_control_zone_p ()
{
  return cond_stack || loop_stack || case_stack;
}
/* Record the current file and line.  Called from emit_line_note.  */
void
set_file_and_line_for_stmt (file, line)
     char *file;
     int line;
{
  emit_filename = file;
  emit_lineno = line;
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (current_function_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, 1,
		       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}
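
/* Illustrative sketch (not part of this file): the GNU C extension this
   function expands.  `&&label' yields the label's address and
   `goto *expr' becomes an indirect jump through it.  */
#if 0
int
interp_step (int op)
{
  static void *dispatch[] = { &&op_add, &&op_sub };
  goto *dispatch[op];		/* expanded by expand_computed_goto */
 op_add:
  return +1;
 op_sub:
  return -1;
}
#endif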
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}
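
/* Illustrative sketch (not part of this file): the GNU C nested-function
   case this supports.  The goto in `inner' must unwind to `outer's
   frame, which is what the handler slots saved here make possible.  */
#if 0
int
outer (int n)
{
  __label__ fail;		/* label usable from the nested function */
  int inner (int i) { if (i < 0) goto fail; return i; }
  return inner (n);
 fail:
  return -1;
}
#endif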
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx temp, handler_slot;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->nonlocal_goto_handler_slots;
      for (link = p->nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with stack_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;

	start_sequence ();
	pushlevel (0);
	start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
	end_sequence ();
	emit_insns_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point ensures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
				TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
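
/* Illustrative sketch (not part of this file): the kind of GNU C
   extended asm this function handles.  "+r" marks an in-out operand
   (rewritten below into an output plus a matching numbered input), and
   "cc" is a clobber.  The i386 mnemonic is just an example.  */
#if 0
static inline int
add_int (int a, int b)
{
  int out = a;
  __asm__ ("addl %1, %0"	/* STRING, the instruction template */
	   : "+r" (out)		/* OUTPUTS: "+" makes it in-out */
	   : "r" (b)		/* INPUTS */
	   : "cc");		/* CLOBBERS */
  return out;
}
#endif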
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;
  /* Check that the number of alternatives is constant across all
     operands.  */
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in `asm'");
	  return;
	}

      tmp = outputs;
      while (tmp)
	{
	  char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));
	  if (n_occurrences (',', constraint) != nalternatives)
	    {
	      error ("operand constraints for `asm' differ in number of alternatives");
	      return;
	    }
	  if (TREE_CHAIN (tmp))
	    tmp = TREE_CHAIN (tmp);
	  else
	    tmp = next, next = 0;
	}
    }
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      char *constraint;
      char *p;
      int c_len;
      int j;
      int is_inout = 0;
      int allows_reg = 0;
      int allows_mem = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
	 if it allows any register.  Be liberal on the latter test, since
	 the worst that happens if we get it wrong is we issue an error
	 message.  */

      c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));

      /* Allow the `=' or `+' to not be at the beginning of the string,
	 since it wasn't explicitly documented that way, and there is a
	 large body of code that puts it last.  Swap the character to
	 the front, so as not to uglify any place else.  */
      switch (c_len)
	{
	default:
	  if ((p = strchr (constraint, '=')) != NULL)
	    break;
	  if ((p = strchr (constraint, '+')) != NULL)
	    break;
	case 0:
	  error ("output operand constraint lacks `='");
	  return;
	}

      if (p != constraint)
	{
	  j = *p;
	  bcopy (constraint, constraint+1, p-constraint);
	  *constraint = j;

	  warning ("output constraint `%c' for operand %d is not at the beginning", j, i);
	}

      is_inout = constraint[0] == '+';
      /* Replace '+' with '='.  */
      constraint[0] = '=';
      /* Make sure we can specify the matching operand.  */
      if (is_inout && i > 9)
	{
	  error ("output operand constraint %d contains `+'", i);
	  return;
	}

      for (j = 1; j < c_len; j++)
	switch (constraint[j])
	  {
	  case '+':
	  case '=':
	    error ("operand constraint contains '+' or '=' at illegal position.");
	    return;

	  case '%':
	    if (i + 1 == ninputs + noutputs)
	      {
		error ("`%%' constraint used with last operand");
		return;
	      }
	    break;

	  case '?':  case '!':  case '*':  case '&':
	  case 'E':  case 'F':  case 'G':  case 'H':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    error ("matching constraint not valid in output operand");
	    break;

	  case 'V':  case 'm':  case 'o':
	    allows_mem = 1;
	    break;

	  case '<':  case '>':
	    /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	       excepting those that expand_call created.  So match memory
	       and hope.  */
	    allows_mem = 1;
	    break;

	  case 'g':  case 'X':
	    allows_reg = 1;
	    allows_mem = 1;
	    break;

	  case 'p':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
	      && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  if (! allows_reg)
	    mark_addressable (TREE_VALUE (tail));

	  output_rtx[i]
	    = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
			   EXPAND_MEMORY_USE_WO);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);
	  if (! allows_mem && GET_CODE (output_rtx[i]) == MEM)
	    {
	      real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
	      output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
	      if (is_inout)
		emit_move_insn (output_rtx[i], real_output_rtx[i]);
	    }
	}
      else
	{
	  output_rtx[i] = assign_temp (type, 0, 0, 0);
	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
	}

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
	  inout_opnum[ninout++] = i;
	}
    }
  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS (VOIDmode,
			       TREE_STRING_POINTER (string), "", 0, argvec,
			       constraints, filename, line);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0, allows_mem = 0;
      char *constraint, *orig_constraint;
      int c_len;
      rtx op;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;

      /* ??? Can this happen, and does the error message make any sense?  */
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)) );
	  return;
	}

      c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
      orig_constraint = constraint;

      /* Make sure constraint has neither `=', `+', nor '&'.  */

      for (j = 0; j < c_len; j++)
	switch (constraint[j])
	  {
	  case '+':  case '=':  case '&':
	    if (constraint == orig_constraint)
	      {
		error ("input operand constraint contains `%c'", constraint[j]);
		return;
	      }
	    break;

	  case '%':
	    if (constraint == orig_constraint
		&& i + 1 == ninputs - ninout)
	      {
		error ("`%%' constraint used with last operand");
		return;
	      }
	    break;

	  case 'V':  case 'm':  case 'o':
	    allows_mem = 1;
	    break;

	  case '<':  case '>':
	  case '?':  case '!':  case '*':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	    /* Whether or not a numeric constraint allows a register is
	       decided by the matching constraint, and so there is no need
	       to do anything special with them.  We must handle them in
	       the default case, so that we don't unnecessarily force
	       operands to memory.  */
	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    if (constraint[j] >= '0' + noutputs)
	      {
		error
		  ("matching constraint references invalid operand number");
		return;
	      }

	    /* Try and find the real constraint for this dup.  */
	    if ((j == 0 && c_len == 1)
		|| (j == 1 && c_len == 2 && constraint[0] == '%'))
	      {
		tree o = outputs;
		for (j = constraint[j] - '0'; j > 0; --j)
		  o = TREE_CHAIN (o);

		c_len = TREE_STRING_LENGTH (TREE_PURPOSE (o)) - 1;
		constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
		j = 0;
		break;
	      }

	    /* ... fall through ...  */

	  case 'p':  case 'r':
	  default:
	    allows_reg = 1;
	    break;

	  case 'g':
	    allows_reg = 1;
	    allows_mem = 1;
	    break;
	  }

      if (! allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (tail));

      op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

      if (asm_operand_ok (op, constraint) <= 0)
	{
	  if (allows_reg)
	    op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
	  else if (!allows_mem)
	    warning ("asm operand %d probably doesn't match constraints", i);
	  else if (CONSTANT_P (op))
	    op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
				  op);
	  else if (GET_CODE (op) == REG
		   || GET_CODE (op) == SUBREG
		   || GET_CODE (op) == CONCAT)
	    {
	      tree type = TREE_TYPE (TREE_VALUE (tail));
	      rtx memloc = assign_temp (type, 1, 1, 1);

	      emit_move_insn (memloc, op);
	      op = memloc;
	    }
	  else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
	    /* We won't recognize volatile memory as an available
	       memory_operand at this point.  Ignore it.  */
	    ;
	  else if (queued_subexp_p (op))
	    ;
	  else
	    /* ??? Leave this only until we have experience with what
	       happens in combine and elsewhere when constraints are
	       not satisfied.  */
	    warning ("asm operand %d probably doesn't match constraints", i);
	}
      XVECEXP (body, 3, i) = op;

      XVECEXP (body, 4, i)	/* constraints */
	= gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			     orig_constraint);
      i++;
    }
  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs - ninout; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      static char match[9+1][2]
	= {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
      int j = inout_opnum[i];

      XVECEXP (body, 3, ninputs - ninout + i)	/* argvec */
	= output_rtx[j];
      XVECEXP (body, 4, ninputs - ninout + i)	/* constraints */
	= gen_rtx_ASM_INPUT (inout_mode[j], match[j]);
    }

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx_SET (VOIDmode,
			   output_rtx[i],
			   gen_rtx_ASM_OPERANDS (VOIDmode,
						 TREE_STRING_POINTER (string),
						 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
						 i, argvec, constraints,
						 filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx_CLOBBER (VOIDmode,
				       gen_rtx_MEM (BLKmode,
						    gen_rtx_SCRATCH (VOIDmode)));
		  continue;
		}

	      /* Ignore unknown register, error already signaled.  */
	      continue;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
	}

      insn = emit_insn (body);
    }

  /* For any outputs that needed reloading into registers, spill them
     back to where they belong.  */
  for (i = 0; i < noutputs; ++i)
    if (real_output_rtx[i])
      emit_move_insn (real_output_rtx[i], output_rtx[i]);

  free_temp_slots ();
}
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }

  /* If EXP is of function type and we are expanding statements for
     value, convert it to pointer-to-function.  */
  if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
    exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);

  last_expr_type = TREE_TYPE (exp);
  last_expr_value = expand_expr (exp,
				 (expr_stmts_for_value
				  ? NULL_RTX : const0_rtx),
				 VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
	;
      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
				   expand_expr (TYPE_SIZE (last_expr_type),
						NULL_RTX, VOIDmode, 0),
				   BLKmode, 0,
				   TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
				   lab);
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}
1703 /* Warn if EXP contains any computations whose results are not used.
1704 Return 1 if a warning is printed; 0 otherwise. */
1707 warn_if_unused_value (exp)
1708 tree exp;
1710 if (TREE_USED (exp))
1711 return 0;
1713 switch (TREE_CODE (exp))
1715 case PREINCREMENT_EXPR:
1716 case POSTINCREMENT_EXPR:
1717 case PREDECREMENT_EXPR:
1718 case POSTDECREMENT_EXPR:
1719 case MODIFY_EXPR:
1720 case INIT_EXPR:
1721 case TARGET_EXPR:
1722 case CALL_EXPR:
1723 case METHOD_CALL_EXPR:
1724 case RTL_EXPR:
1725 case TRY_CATCH_EXPR:
1726 case WITH_CLEANUP_EXPR:
1727 case EXIT_EXPR:
1728 /* We don't warn about COND_EXPR because it may be a useful
1729 construct if either arm contains a side effect. */
1730 case COND_EXPR:
1731 return 0;
1733 case BIND_EXPR:
1734 /* For a binding, warn if no side effect within it. */
1735 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1737 case SAVE_EXPR:
1738 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1740 case TRUTH_ORIF_EXPR:
1741 case TRUTH_ANDIF_EXPR:
1742 /* In && or ||, warn if 2nd operand has no side effect. */
1743 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1745 case COMPOUND_EXPR:
1746 if (TREE_NO_UNUSED_WARNING (exp))
1747 return 0;
1748 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1749 return 1;
1750 /* Let people do `(foo (), 0)' without a warning. */
1751 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1752 return 0;
1753 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1755 case NOP_EXPR:
1756 case CONVERT_EXPR:
1757 case NON_LVALUE_EXPR:
1758 /* Don't warn about values cast to void. */
1759 if (TREE_TYPE (exp) == void_type_node)
1760 return 0;
1761 /* Don't warn about conversions not explicit in the user's program. */
1762 if (TREE_NO_UNUSED_WARNING (exp))
1763 return 0;
1764 /* Assignment to a cast usually results in a cast of a modify.
1765 Don't complain about that. There can be an arbitrary number of
1766 casts before the modify, so we must loop until we find the first
1767 non-cast expression and then test to see if that is a modify. */
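/* E.g. (illustrative) `(char) (x = y);' reaches here as a
   CONVERT_EXPR or NOP_EXPR whose operand is a MODIFY_EXPR,
   and must not be warned about.  */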
1769 tree tem = TREE_OPERAND (exp, 0);
1771 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1772 tem = TREE_OPERAND (tem, 0);
1774 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1775 || TREE_CODE (tem) == CALL_EXPR)
1776 return 0;
1778 goto warn;
1780 case INDIRECT_REF:
1781 /* Don't warn about automatic dereferencing of references, since
1782 the user cannot control it. */
1783 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1784 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1785 /* ... fall through ... */
1787 default:
1788 /* Referencing a volatile value is a side effect, so don't warn. */
1789 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1790 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1791 && TREE_THIS_VOLATILE (exp))
1792 return 0;
1793 warn:
1794 warning_with_file_and_line (emit_filename, emit_lineno,
1795 "value computed is not used");
1796 return 1;
1800 /* Clear out the memory of the last expression evaluated. */
1802 void
1803 clear_last_expr ()
1805 last_expr_type = 0;
1808 /* Begin a statement which will return a value.
1809 Return the RTL_EXPR for this statement expr.
1810 The caller must save that value and pass it to expand_end_stmt_expr. */
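/* A minimal sketch of the intended calling pattern (hypothetical
   front-end code, not from the sources):

     tree t = expand_start_stmt_expr ();
     ... expand the statements of the ({...}) body ...
     t = expand_end_stmt_expr (t);

   The returned RTL_EXPR then stands for the construct's value.  */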
1812 tree
1813 expand_start_stmt_expr ()
1815 int momentary;
1816 tree t;
1818 /* Make the RTL_EXPR node temporary, not momentary,
1819 so that rtl_expr_chain doesn't become garbage. */
1820 momentary = suspend_momentary ();
1821 t = make_node (RTL_EXPR);
1822 resume_momentary (momentary);
1823 do_pending_stack_adjust ();
1824 start_sequence_for_rtl_expr (t);
1825 NO_DEFER_POP;
1826 expr_stmts_for_value++;
1827 return t;
1830 /* Restore the previous state at the end of a statement that returns a value.
1831 Returns a tree node representing the statement's value and the
1832 insns to compute the value.
1834 The nodes of that expression have been freed by now, so we cannot use them.
1835 But we don't want to do that anyway; the expression has already been
1836 evaluated and now we just want to use the value. So generate an RTL_EXPR
1837 with the proper type and RTL value.
1839 If the last substatement was not an expression,
1840 return something with type `void'. */
1842 tree
1843 expand_end_stmt_expr (t)
1844 tree t;
1846 OK_DEFER_POP;
1848 if (last_expr_type == 0)
1850 last_expr_type = void_type_node;
1851 last_expr_value = const0_rtx;
1853 else if (last_expr_value == 0)
1854 /* There are some cases where this can happen, such as when the
1855 statement has void type. */
1856 last_expr_value = const0_rtx;
1857 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1858 /* Remove any possible QUEUED. */
1859 last_expr_value = protect_from_queue (last_expr_value, 0);
1861 emit_queue ();
1863 TREE_TYPE (t) = last_expr_type;
1864 RTL_EXPR_RTL (t) = last_expr_value;
1865 RTL_EXPR_SEQUENCE (t) = get_insns ();
1867 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1869 end_sequence ();
1871 /* Don't consider deleting this expr or containing exprs at tree level. */
1872 TREE_SIDE_EFFECTS (t) = 1;
1873 /* Propagate volatility of the actual RTL expr. */
1874 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1876 last_expr_type = 0;
1877 expr_stmts_for_value--;
1879 return t;
1882 /* Generate RTL for the start of an if-then. COND is the expression
1883 whose truth should be tested.
1885 If EXITFLAG is nonzero, this conditional is visible to
1886 `exit_something'. */
1888 void
1889 expand_start_cond (cond, exitflag)
1890 tree cond;
1891 int exitflag;
1893 struct nesting *thiscond = ALLOC_NESTING ();
1895 /* Make an entry on cond_stack for the cond we are entering. */
1897 thiscond->next = cond_stack;
1898 thiscond->all = nesting_stack;
1899 thiscond->depth = ++nesting_depth;
1900 thiscond->data.cond.next_label = gen_label_rtx ();
1901 /* Before we encounter an `else', we don't need a separate exit label
1902 unless there are supposed to be exit statements
1903 to exit this conditional. */
1904 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1905 thiscond->data.cond.endif_label = thiscond->exit_label;
1906 cond_stack = thiscond;
1907 nesting_stack = thiscond;
1909 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1912 /* Generate RTL between the then-clause and the elseif-clause
1913 of an if-then-elseif-.... */
1915 void
1916 expand_start_elseif (cond)
1917 tree cond;
1919 if (cond_stack->data.cond.endif_label == 0)
1920 cond_stack->data.cond.endif_label = gen_label_rtx ();
1921 emit_jump (cond_stack->data.cond.endif_label);
1922 emit_label (cond_stack->data.cond.next_label);
1923 cond_stack->data.cond.next_label = gen_label_rtx ();
1924 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1927 /* Generate RTL between the then-clause and the else-clause
1928 of an if-then-else. */
1930 void
1931 expand_start_else ()
1933 if (cond_stack->data.cond.endif_label == 0)
1934 cond_stack->data.cond.endif_label = gen_label_rtx ();
1936 emit_jump (cond_stack->data.cond.endif_label);
1937 emit_label (cond_stack->data.cond.next_label);
1938 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1941 /* After calling expand_start_else, turn this "else" into an "else if"
1942 by providing another condition. */
1944 void
1945 expand_elseif (cond)
1946 tree cond;
1948 cond_stack->data.cond.next_label = gen_label_rtx ();
1949 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1952 /* Generate RTL for the end of an if-then.
1953 Pop the record for it off of cond_stack. */
1955 void
1956 expand_end_cond ()
1958 struct nesting *thiscond = cond_stack;
1960 do_pending_stack_adjust ();
1961 if (thiscond->data.cond.next_label)
1962 emit_label (thiscond->data.cond.next_label);
1963 if (thiscond->data.cond.endif_label)
1964 emit_label (thiscond->data.cond.endif_label);
1966 POPSTACK (cond_stack);
1967 last_expr_type = 0;
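/* Illustrative call sequence (hypothetical front-end code) for
   `if (a) S1; else if (b) S2; else S3;':

     expand_start_cond (a, 0);   ... expand S1 ...
     expand_start_elseif (b);    ... expand S2 ...
     expand_start_else ();       ... expand S3 ...
     expand_end_cond ();  */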
1972 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1973 loop should be exited by `exit_something'. This is a loop for which
1974 `expand_continue' will jump to the top of the loop.
1976 Make an entry on loop_stack to record the labels associated with
1977 this loop. */
1979 struct nesting *
1980 expand_start_loop (exit_flag)
1981 int exit_flag;
1983 register struct nesting *thisloop = ALLOC_NESTING ();
1985 /* Make an entry on loop_stack for the loop we are entering. */
1987 thisloop->next = loop_stack;
1988 thisloop->all = nesting_stack;
1989 thisloop->depth = ++nesting_depth;
1990 thisloop->data.loop.start_label = gen_label_rtx ();
1991 thisloop->data.loop.end_label = gen_label_rtx ();
1992 thisloop->data.loop.alt_end_label = 0;
1993 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
1994 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
1995 loop_stack = thisloop;
1996 nesting_stack = thisloop;
1998 do_pending_stack_adjust ();
1999 emit_queue ();
2000 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2001 emit_label (thisloop->data.loop.start_label);
2003 return thisloop;
2006 /* Like expand_start_loop but for a loop where the continuation point
2007 (for expand_continue_loop) will be specified explicitly. */
2009 struct nesting *
2010 expand_start_loop_continue_elsewhere (exit_flag)
2011 int exit_flag;
2013 struct nesting *thisloop = expand_start_loop (exit_flag);
2014 loop_stack->data.loop.continue_label = gen_label_rtx ();
2015 return thisloop;
2018 /* Specify the continuation point for a loop started with
2019 expand_start_loop_continue_elsewhere.
2020 Use this at the point in the code to which a continue statement
2021 should jump. */
2023 void
2024 expand_loop_continue_here ()
2026 do_pending_stack_adjust ();
2027 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2028 emit_label (loop_stack->data.loop.continue_label);
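/* A sketch of how a front end might expand `while (cond) body'
   with these routines (illustrative only):

     whichloop = expand_start_loop (1);
     expand_exit_loop_if_false (whichloop, cond);
     ... expand body ...
     expand_end_loop ();

   A `for' statement whose increment must run at `continue' would
   instead use expand_start_loop_continue_elsewhere and place
   expand_loop_continue_here before expanding the increment.  */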
2031 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2032 Pop the block off of loop_stack. */
2034 void
2035 expand_end_loop ()
2037 rtx start_label = loop_stack->data.loop.start_label;
2038 rtx insn = get_last_insn ();
2039 int needs_end_jump = 1;
2041 /* Mark the continue-point at the top of the loop if none elsewhere. */
2042 if (start_label == loop_stack->data.loop.continue_label)
2043 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2045 do_pending_stack_adjust ();
2047 /* If optimizing, perhaps reorder the loop.
2048 First, try to use a condjump near the end.
2049 expand_exit_loop_if_false ends loops with unconditional jumps,
2050 like this:
2052 if (test) goto label;
2053 optional: cleanup
2054 goto loop_stack->data.loop.end_label
2055 barrier
2056 label:
2058 If we find such a pattern, we can end the loop earlier. */
2060 if (optimize
2061 && GET_CODE (insn) == CODE_LABEL
2062 && LABEL_NAME (insn) == NULL
2063 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2065 rtx label = insn;
2066 rtx jump = PREV_INSN (PREV_INSN (label));
2068 if (GET_CODE (jump) == JUMP_INSN
2069 && GET_CODE (PATTERN (jump)) == SET
2070 && SET_DEST (PATTERN (jump)) == pc_rtx
2071 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2072 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2073 == loop_stack->data.loop.end_label))
2075 rtx prev;
2077 /* The test might be complex and reference LABEL multiple times,
2078 like the loop in loop_iterations to set vtop. To handle this,
2079 we move LABEL. */
2080 insn = PREV_INSN (label);
2081 reorder_insns (label, label, start_label);
2083 for (prev = PREV_INSN (jump); ; prev = PREV_INSN (prev))
2085 /* We ignore line number notes, but if we see any other note,
2086 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2087 NOTE_INSN_LOOP_*, we disable this optimization. */
2088 if (GET_CODE (prev) == NOTE)
2090 if (NOTE_LINE_NUMBER (prev) < 0)
2091 break;
2092 continue;
2094 if (GET_CODE (prev) == CODE_LABEL)
2095 break;
2096 if (GET_CODE (prev) == JUMP_INSN)
2098 if (GET_CODE (PATTERN (prev)) == SET
2099 && SET_DEST (PATTERN (prev)) == pc_rtx
2100 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2101 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2102 == LABEL_REF)
2103 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2105 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2106 = start_label;
2107 emit_note_after (NOTE_INSN_LOOP_END, prev);
2108 needs_end_jump = 0;
2110 break;
2116 /* If the loop starts with a loop exit, roll that to the end where
2117 it will optimize together with the jump back.
2119 We look for the conditional branch to the exit, except that once
2120 we find such a branch, we don't look past 30 instructions.
2122 In more detail, if the loop presently looks like this (in pseudo-C):
2124 start_label:
2125 if (test) goto end_label;
2126 body;
2127 goto start_label;
2128 end_label:
2130 transform it to look like:
2132 goto start_label;
2133 newstart_label:
2134 body;
2135 start_label:
2136 if (test) goto end_label;
2137 goto newstart_label;
2138 end_label:
2140 Here, the `test' may actually consist of some reasonably complex
2141 code, terminating in a test. */
2143 if (optimize
2144 && needs_end_jump
2145 &&
2146 ! (GET_CODE (insn) == JUMP_INSN
2147 && GET_CODE (PATTERN (insn)) == SET
2148 && SET_DEST (PATTERN (insn)) == pc_rtx
2149 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2151 int eh_regions = 0;
2152 int num_insns = 0;
2153 rtx last_test_insn = NULL_RTX;
2155 /* Scan insns from the top of the loop looking for a qualified
2156 conditional exit. */
2157 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2158 insn = NEXT_INSN (insn))
2160 if (GET_CODE (insn) == NOTE)
2162 if (optimize < 2
2163 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2164 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2165 /* The code that actually moves the exit test will
2166 carefully leave BLOCK notes in their original
2167 location. That means, however, that we can't debug
2168 the exit test itself. So, we refuse to move code
2169 containing BLOCK notes at low optimization levels. */
2170 break;
2172 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2173 ++eh_regions;
2174 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2176 --eh_regions;
2177 if (eh_regions < 0)
2178 /* We've come to the end of an EH region, but
2179 never saw the beginning of that region. That
2180 means that an EH region begins before the top
2181 of the loop, and ends in the middle of it. The
2182 existence of such a situation violates a basic
2183 assumption in this code, since that would imply
2184 that even when EH_REGIONS is zero, we might
2185 move code out of an exception region. */
2186 abort ();
2189 /* We must not walk into a nested loop. */
2190 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2191 break;
2193 /* We already know this INSN is a NOTE, so there's no
2194 point in looking at it to see if it's a JUMP. */
2195 continue;
2198 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2199 num_insns++;
2201 if (last_test_insn && num_insns > 30)
2202 break;
2204 if (eh_regions > 0)
2205 /* We don't want to move a partial EH region. Consider:
2207 while ( ( { try {
2208 if (cond ()) 0;
2209 else {
2210 bar();
2211 1;
2212 }
2213 } catch (...) {
2214 1;
2215 } )) {
2216 body;
2217 }
2219 This isn't legal C++, but here's what it's supposed to
2220 mean: if cond() is true, stop looping. Otherwise,
2221 call bar, and keep looping. In addition, if cond
2222 throws an exception, catch it and keep looping. Such
2223 constructs are certainly legal in LISP.
2225 We should not move the `if (cond()) 0' test since then
2226 the EH-region for the try-block would be broken up.
2227 (In this case we would have the EH_BEG note for the `try'
2228 and `if cond()' but not the call to bar() or the
2229 EH_END note.)
2231 So we don't look for tests within an EH region. */
2232 continue;
2234 if (GET_CODE (insn) == JUMP_INSN
2235 && GET_CODE (PATTERN (insn)) == SET
2236 && SET_DEST (PATTERN (insn)) == pc_rtx)
2238 /* This is indeed a jump. */
2239 rtx dest1 = NULL_RTX;
2240 rtx dest2 = NULL_RTX;
2241 rtx potential_last_test;
2242 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2244 /* A conditional jump. */
2245 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2246 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2247 potential_last_test = insn;
2249 else
2251 /* An unconditional jump. */
2252 dest1 = SET_SRC (PATTERN (insn));
2253 /* Include the BARRIER after the JUMP. */
2254 potential_last_test = NEXT_INSN (insn);
2257 do {
2258 if (dest1 && GET_CODE (dest1) == LABEL_REF
2259 && ((XEXP (dest1, 0)
2260 == loop_stack->data.loop.alt_end_label)
2261 || (XEXP (dest1, 0)
2262 == loop_stack->data.loop.end_label)))
2264 last_test_insn = potential_last_test;
2265 break;
2268 /* If this was a conditional jump, there may be
2269 another label at which we should look. */
2270 dest1 = dest2;
2271 dest2 = NULL_RTX;
2272 } while (dest1);
2276 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2278 /* We found one. Move everything from there up
2279 to the end of the loop, and add a jump into the loop
2280 to jump to there. */
2281 register rtx newstart_label = gen_label_rtx ();
2282 register rtx start_move = start_label;
2283 rtx next_insn;
2285 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2286 then we want to move this note also. */
2287 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2288 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2289 == NOTE_INSN_LOOP_CONT))
2290 start_move = PREV_INSN (start_move);
2292 emit_label_after (newstart_label, PREV_INSN (start_move));
2294 /* Actually move the insns. Start at the beginning, and
2295 keep copying insns until we've copied the
2296 last_test_insn. */
2297 for (insn = start_move; insn; insn = next_insn)
2299 /* Figure out which insn comes after this one. We have
2300 to do this before we move INSN. */
2301 if (insn == last_test_insn)
2302 /* We've moved all the insns. */
2303 next_insn = NULL_RTX;
2304 else
2305 next_insn = NEXT_INSN (insn);
2307 if (GET_CODE (insn) == NOTE
2308 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2309 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2310 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2311 NOTE_INSN_BLOCK_ENDs because the correct generation
2312 of debugging information depends on these appearing
2313 in the same order in the RTL and in the tree
2314 structure, where they are represented as BLOCKs.
2315 So, we don't move block notes. Of course, moving
2316 the code inside the block is likely to make it
2317 impossible to debug the instructions in the exit
2318 test, but such is the price of optimization. */
2319 continue;
2321 /* Move the INSN. */
2322 reorder_insns (insn, insn, get_last_insn ());
2325 emit_jump_insn_after (gen_jump (start_label),
2326 PREV_INSN (newstart_label));
2327 emit_barrier_after (PREV_INSN (newstart_label));
2328 start_label = newstart_label;
2332 if (needs_end_jump)
2334 emit_jump (start_label);
2335 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2337 emit_label (loop_stack->data.loop.end_label);
2339 POPSTACK (loop_stack);
2341 last_expr_type = 0;
2344 /* Generate a jump to the current loop's continue-point.
2345 This is usually the top of the loop, but may be specified
2346 explicitly elsewhere. If not currently inside a loop,
2347 return 0 and do nothing; caller will print an error message. */
2349 int
2350 expand_continue_loop (whichloop)
2351 struct nesting *whichloop;
2353 last_expr_type = 0;
2354 if (whichloop == 0)
2355 whichloop = loop_stack;
2356 if (whichloop == 0)
2357 return 0;
2358 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2359 NULL_RTX);
2360 return 1;
2363 /* Generate a jump to exit the current loop. If not currently inside a loop,
2364 return 0 and do nothing; caller will print an error message. */
2366 int
2367 expand_exit_loop (whichloop)
2368 struct nesting *whichloop;
2370 last_expr_type = 0;
2371 if (whichloop == 0)
2372 whichloop = loop_stack;
2373 if (whichloop == 0)
2374 return 0;
2375 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2376 return 1;
2379 /* Generate a conditional jump to exit the current loop if COND
2380 evaluates to zero. If not currently inside a loop,
2381 return 0 and do nothing; caller will print an error message. */
2383 int
2384 expand_exit_loop_if_false (whichloop, cond)
2385 struct nesting *whichloop;
2386 tree cond;
2388 rtx label = gen_label_rtx ();
2389 rtx last_insn;
2390 last_expr_type = 0;
2392 if (whichloop == 0)
2393 whichloop = loop_stack;
2394 if (whichloop == 0)
2395 return 0;
2396 /* In order to handle fixups, we actually create a conditional jump
2397 around an unconditional branch to exit the loop. If fixups are
2398 necessary, they go before the unconditional branch. */
2401 do_jump (cond, NULL_RTX, label);
2402 last_insn = get_last_insn ();
2403 if (GET_CODE (last_insn) == CODE_LABEL)
2404 whichloop->data.loop.alt_end_label = last_insn;
2405 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2406 NULL_RTX);
2407 emit_label (label);
2409 return 1;
2412 /* Return nonzero if the loop nest is empty. Else return zero. */
2414 int
2415 stmt_loop_nest_empty ()
2417 return (loop_stack == NULL);
2420 /* Return non-zero if we should preserve sub-expressions as separate
2421 pseudos. We never do so if we aren't optimizing. We always do so
2422 if -fexpensive-optimizations.
2424 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2425 the loop may still be a small one. */
2427 int
2428 preserve_subexpressions_p ()
2430 rtx insn;
2432 if (flag_expensive_optimizations)
2433 return 1;
2435 if (optimize == 0 || current_function == 0 || loop_stack == 0)
2436 return 0;
2438 insn = get_last_insn_anywhere ();
2440 return (insn
2441 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2442 < n_non_fixed_regs * 3));
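/* E.g. (illustrative arithmetic): with 29 non-fixed registers,
   subexpressions are preserved only while the current insn's UID
   is within 29 * 3 = 87 of the loop's start label.  */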
2446 /* Generate a jump to exit the current loop, conditional, binding contour
2447 or case statement. Not all such constructs are visible to this function,
2448 only those started with EXIT_FLAG nonzero. Individual languages use
2449 the EXIT_FLAG parameter to control which kinds of constructs you can
2450 exit this way.
2452 If not currently inside anything that can be exited,
2453 return 0 and do nothing; caller will print an error message. */
2455 int
2456 expand_exit_something ()
2458 struct nesting *n;
2459 last_expr_type = 0;
2460 for (n = nesting_stack; n; n = n->all)
2461 if (n->exit_label != 0)
2463 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2464 return 1;
2467 return 0;
2470 /* Generate RTL to return from the current function, with no value.
2471 (That is, we do not do anything about returning any value.) */
2473 void
2474 expand_null_return ()
2476 struct nesting *block = block_stack;
2477 rtx last_insn = 0;
2479 /* Does any pending block have cleanups? */
2481 while (block && block->data.block.cleanups == 0)
2482 block = block->next;
2484 /* If yes, use a goto to return, since that runs cleanups. */
2486 expand_null_return_1 (last_insn, block != 0);
2489 /* Generate RTL to return from the current function, with value VAL. */
2491 static void
2492 expand_value_return (val)
2493 rtx val;
2495 struct nesting *block = block_stack;
2496 rtx last_insn = get_last_insn ();
2497 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2499 /* Copy the value to the return location
2500 unless it's already there. */
2502 if (return_reg != val)
2504 #ifdef PROMOTE_FUNCTION_RETURN
2505 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2506 int unsignedp = TREE_UNSIGNED (type);
2507 enum machine_mode mode
2508 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2509 &unsignedp, 1);
2511 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2512 convert_move (return_reg, val, unsignedp);
2513 else
2514 #endif
2515 emit_move_insn (return_reg, val);
2517 if (GET_CODE (return_reg) == REG
2518 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2519 emit_insn (gen_rtx_USE (VOIDmode, return_reg));
2520 /* Handle calls that return values in multiple non-contiguous locations.
2521 The Irix 6 ABI has examples of this. */
2522 else if (GET_CODE (return_reg) == PARALLEL)
2524 int i;
2526 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2528 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2530 if (GET_CODE (x) == REG
2531 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2532 emit_insn (gen_rtx_USE (VOIDmode, x));
2536 /* Does any pending block have cleanups? */
2538 while (block && block->data.block.cleanups == 0)
2539 block = block->next;
2541 /* If yes, use a goto to return, since that runs cleanups.
2542 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2544 expand_null_return_1 (last_insn, block != 0);
2547 /* Output a return with no value. If LAST_INSN is nonzero,
2548 pretend that the return takes place after LAST_INSN.
2549 If USE_GOTO is nonzero then don't use a return instruction;
2550 go to the return label instead. This causes any cleanups
2551 of pending blocks to be executed normally. */
2553 static void
2554 expand_null_return_1 (last_insn, use_goto)
2555 rtx last_insn;
2556 int use_goto;
2558 rtx end_label = cleanup_label ? cleanup_label : return_label;
2560 clear_pending_stack_adjust ();
2561 do_pending_stack_adjust ();
2562 last_expr_type = 0;
2564 /* PCC-struct return always uses an epilogue. */
2565 if (current_function_returns_pcc_struct || use_goto)
2567 if (end_label == 0)
2568 end_label = return_label = gen_label_rtx ();
2569 expand_goto_internal (NULL_TREE, end_label, last_insn);
2570 return;
2573 /* Otherwise output a simple return-insn if one is available,
2574 unless it won't do the job. */
2575 #ifdef HAVE_return
2576 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2578 emit_jump_insn (gen_return ());
2579 emit_barrier ();
2580 return;
2582 #endif
2584 /* Otherwise jump to the epilogue. */
2585 expand_goto_internal (NULL_TREE, end_label, last_insn);
2588 /* Generate RTL to evaluate the expression RETVAL and return it
2589 from the current function. */
2591 void
2592 expand_return (retval)
2593 tree retval;
2595 /* If there are any cleanups to be performed, then they will
2596 be inserted following LAST_INSN. It is desirable
2597 that the last_insn, for such purposes, should be the
2598 last insn before computing the return value. Otherwise, cleanups
2599 which call functions can clobber the return value. */
2600 /* ??? rms: I think that is erroneous, because in C++ it would
2601 run destructors on variables that might be used in the subsequent
2602 computation of the return value. */
2603 rtx last_insn = 0;
2604 register rtx val = 0;
2605 register rtx op0;
2606 tree retval_rhs;
2607 int cleanups;
2609 /* If function wants no value, give it none. */
2610 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2612 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2613 emit_queue ();
2614 expand_null_return ();
2615 return;
2618 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2619 /* This is not sufficient. We also need to watch for cleanups of the
2620 expression we are about to expand. Unfortunately, we cannot know
2621 if it has cleanups until we expand it, and we want to change how we
2622 expand it depending upon if we need cleanups. We can't win. */
2623 #if 0
2624 cleanups = any_pending_cleanups (1);
2625 #else
2626 cleanups = 1;
2627 #endif
2629 if (TREE_CODE (retval) == RESULT_DECL)
2630 retval_rhs = retval;
2631 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2632 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2633 retval_rhs = TREE_OPERAND (retval, 1);
2634 else if (TREE_TYPE (retval) == void_type_node)
2635 /* Recognize tail-recursive call to void function. */
2636 retval_rhs = retval;
2637 else
2638 retval_rhs = NULL_TREE;
2640 /* Only use `last_insn' if there are cleanups which must be run. */
2641 if (cleanups || cleanup_label != 0)
2642 last_insn = get_last_insn ();
2644 /* Distribute return down conditional expr if either of the sides
2645 may involve tail recursion (see test below). This enhances the number
2646 of tail recursions we see. Don't do this always since it can produce
2647 sub-optimal code in some cases and we distribute assignments into
2648 conditional expressions when it would help. */
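/* E.g. (illustrative): `return p ? f (x) : y;' is expanded as
   `if (p) return f (x); else return y;', exposing the call to f
   to the tail-recursion test below.  */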
2650 if (optimize && retval_rhs != 0
2651 && frame_offset == 0
2652 && TREE_CODE (retval_rhs) == COND_EXPR
2653 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2654 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2656 rtx label = gen_label_rtx ();
2657 tree expr;
2659 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2660 start_cleanup_deferral ();
2661 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2662 DECL_RESULT (current_function_decl),
2663 TREE_OPERAND (retval_rhs, 1));
2664 TREE_SIDE_EFFECTS (expr) = 1;
2665 expand_return (expr);
2666 emit_label (label);
2668 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2669 DECL_RESULT (current_function_decl),
2670 TREE_OPERAND (retval_rhs, 2));
2671 TREE_SIDE_EFFECTS (expr) = 1;
2672 expand_return (expr);
2673 end_cleanup_deferral ();
2674 return;
2677 /* Attempt to optimize the call if it is tail recursive. */
2678 if (optimize_tail_recursion (retval_rhs, last_insn))
2679 return;
2681 #ifdef HAVE_return
2682 /* This optimization is safe if there are local cleanups
2683 because expand_null_return takes care of them.
2684 ??? I think it should also be safe when there is a cleanup label,
2685 because expand_null_return takes care of them, too.
2686 Any reason why not? */
2687 if (HAVE_return && cleanup_label == 0
2688 && ! current_function_returns_pcc_struct
2689 && BRANCH_COST <= 1)
2691 /* If this is return x == y; then generate
2692 if (x == y) return 1; else return 0;
2693 if we can do it with explicit return insns and branches are cheap,
2694 but not if we have the corresponding scc insn. */
2695 int has_scc = 0;
2696 if (retval_rhs)
2697 switch (TREE_CODE (retval_rhs))
2699 case EQ_EXPR:
2700 #ifdef HAVE_seq
2701 has_scc = HAVE_seq;
2702 #endif
2703 case NE_EXPR:
2704 #ifdef HAVE_sne
2705 has_scc = HAVE_sne;
2706 #endif
2707 case GT_EXPR:
2708 #ifdef HAVE_sgt
2709 has_scc = HAVE_sgt;
2710 #endif
2711 case GE_EXPR:
2712 #ifdef HAVE_sge
2713 has_scc = HAVE_sge;
2714 #endif
2715 case LT_EXPR:
2716 #ifdef HAVE_slt
2717 has_scc = HAVE_slt;
2718 #endif
2719 case LE_EXPR:
2720 #ifdef HAVE_sle
2721 has_scc = HAVE_sle;
2722 #endif
2723 case TRUTH_ANDIF_EXPR:
2724 case TRUTH_ORIF_EXPR:
2725 case TRUTH_AND_EXPR:
2726 case TRUTH_OR_EXPR:
2727 case TRUTH_NOT_EXPR:
2728 case TRUTH_XOR_EXPR:
2729 if (! has_scc)
2731 op0 = gen_label_rtx ();
2732 jumpifnot (retval_rhs, op0);
2733 expand_value_return (const1_rtx);
2734 emit_label (op0);
2735 expand_value_return (const0_rtx);
2736 return;
2738 break;
2740 default:
2741 break;
2744 #endif /* HAVE_return */
2746 /* If the result is an aggregate that is being returned in one (or more)
2747 registers, load the registers here. The compiler currently can't handle
2748 copying a BLKmode value into registers. We could put this code in a
2749 more general area (for use by everyone instead of just function
2750 call/return), but until this feature is generally usable it is kept here
2751 (and in expand_call). The value must go into a pseudo in case there
2752 are cleanups that will clobber the real return register. */
2754 if (retval_rhs != 0
2755 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2756 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2758 int i, bitpos, xbitpos;
2759 int big_endian_correction = 0;
2760 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2761 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2762 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),
2763 (unsigned int)BITS_PER_WORD);
2764 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2765 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2766 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2767 enum machine_mode tmpmode, result_reg_mode;
2769 /* Structures whose size is not a multiple of a word are aligned
2770 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2771 machine, this means we must skip the empty high order bytes when
2772 calculating the bit offset. */
2773 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2774 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2775 * BITS_PER_UNIT));
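/* E.g. (illustrative): with 32-bit words, a 6-byte structure
   has bytes % UNITS_PER_WORD == 2, giving a correction of
   32 - 2 * 8 = 16 bits.  */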
2777 /* Copy the structure BITSIZE bits at a time. */
2778 for (bitpos = 0, xbitpos = big_endian_correction;
2779 bitpos < bytes * BITS_PER_UNIT;
2780 bitpos += bitsize, xbitpos += bitsize)
2782 /* We need a new destination pseudo each time xbitpos is
2783 on a word boundary and when xbitpos == big_endian_correction
2784 (the first time through). */
2785 if (xbitpos % BITS_PER_WORD == 0
2786 || xbitpos == big_endian_correction)
2788 /* Generate an appropriate register. */
2789 dst = gen_reg_rtx (word_mode);
2790 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2792 /* Clobber the destination before we move anything into it. */
2793 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
2796 /* We need a new source operand each time bitpos is on a word
2797 boundary. */
2798 if (bitpos % BITS_PER_WORD == 0)
2799 src = operand_subword_force (result_val,
2800 bitpos / BITS_PER_WORD,
2801 BLKmode);
2803 /* Use bitpos for the source extraction (left justified) and
2804 xbitpos for the destination store (right justified). */
2805 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2806 extract_bit_field (src, bitsize,
2807 bitpos % BITS_PER_WORD, 1,
2808 NULL_RTX, word_mode,
2809 word_mode,
2810 bitsize / BITS_PER_UNIT,
2811 BITS_PER_WORD),
2812 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2815 /* Find the smallest integer mode large enough to hold the
2816 entire structure and use that mode instead of BLKmode
2817 on the USE insn for the return register. */
2818 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2819 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2820 tmpmode != MAX_MACHINE_MODE;
2821 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2823 /* Have we found a large enough mode? */
2824 if (GET_MODE_SIZE (tmpmode) >= bytes)
2825 break;
2828 /* No suitable mode found. */
2829 if (tmpmode == MAX_MACHINE_MODE)
2830 abort ();
2832 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2834 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2835 result_reg_mode = word_mode;
2836 else
2837 result_reg_mode = tmpmode;
2838 result_reg = gen_reg_rtx (result_reg_mode);
2840 emit_queue ();
2841 for (i = 0; i < n_regs; i++)
2842 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2843 result_pseudos[i]);
2845 if (tmpmode != result_reg_mode)
2846 result_reg = gen_lowpart (tmpmode, result_reg);
2848 expand_value_return (result_reg);
2850 else if (cleanups
2851 && retval_rhs != 0
2852 && TREE_TYPE (retval_rhs) != void_type_node
2853 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2855 /* Calculate the return value into a pseudo reg. */
2856 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
2857 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2858 val = force_not_mem (val);
2859 emit_queue ();
2860 /* Return the calculated value, doing cleanups first. */
2861 expand_value_return (val);
2863 else
2865 /* No cleanups or no hard reg used;
2866 calculate value into hard return reg. */
2867 expand_expr (retval, const0_rtx, VOIDmode, 0);
2868 emit_queue ();
2869 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2873 /* Return 1 if the end of the generated RTX is not a barrier.
2874 This means code already compiled can drop through. */
2876 int
2877 drop_through_at_end_p ()
2879 rtx insn = get_last_insn ();
2880 while (insn && GET_CODE (insn) == NOTE)
2881 insn = PREV_INSN (insn);
2882 return insn && GET_CODE (insn) != BARRIER;
2885 /* Test CALL_EXPR to determine if it is a potential tail recursion call
2886 and emit code to optimize the tail recursion. LAST_INSN indicates where
2887 to place the jump to the tail recursion label. Return TRUE if the
2888 call was optimized into a goto.
2890 This is only used by expand_return, but expand_call is expected to
2891 use it soon. */
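/* E.g. (illustrative): given

     int f (n, acc) int n, acc;
     { ... return f (n - 1, n * acc); ... }

   the recursive call is replaced by stores of the new argument
   values into `n' and `acc' followed by a jump back to
   tail_recursion_label, provided frame_offset is 0 and
   tail_recursion_args accepts the arguments.  */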
2893 int
2894 optimize_tail_recursion (call_expr, last_insn)
2895 tree call_expr;
2896 rtx last_insn;
2898 /* For tail-recursive call to current function,
2899 just jump back to the beginning.
2900 It's unsafe if any auto variable in this function
2901 has its address taken; for simplicity,
2902 require stack frame to be empty. */
2903 if (optimize && call_expr != 0
2904 && frame_offset == 0
2905 && TREE_CODE (call_expr) == CALL_EXPR
2906 && TREE_CODE (TREE_OPERAND (call_expr, 0)) == ADDR_EXPR
2907 && TREE_OPERAND (TREE_OPERAND (call_expr, 0), 0) == current_function_decl
2908 /* Finish checking validity, and if valid emit code
2909 to set the argument variables for the new call. */
2910 && tail_recursion_args (TREE_OPERAND (call_expr, 1),
2911 DECL_ARGUMENTS (current_function_decl)))
2913 if (tail_recursion_label == 0)
2915 tail_recursion_label = gen_label_rtx ();
2916 emit_label_after (tail_recursion_label,
2917 tail_recursion_reentry);
2919 emit_queue ();
2920 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2921 emit_barrier ();
2922 return 1;
2925 return 0;
2928 /* Emit code to alter this function's formal parms for a tail-recursive call.
2929 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2930 FORMALS is the chain of decls of formals.
2931 Return 1 if this can be done;
2932 otherwise return 0 and do not emit any code. */
2934 static int
2935 tail_recursion_args (actuals, formals)
2936 tree actuals, formals;
2938 register tree a = actuals, f = formals;
2939 register int i;
2940 register rtx *argvec;
2942 /* Check that number and types of actuals are compatible
2943 with the formals. This is not always true in valid C code.
2944 Also check that no formal needs to be addressable
2945 and that all formals are scalars. */
2947 /* Also count the args. */
2949 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2951 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2952 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
2953 return 0;
2954 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2955 return 0;
2957 if (a != 0 || f != 0)
2958 return 0;
2960 /* Compute all the actuals. */
2962 argvec = (rtx *) alloca (i * sizeof (rtx));
2964 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2965 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2967 /* Find which actual values refer to current values of previous formals.
2968 Copy each of them now, before any formal is changed. */
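/* E.g. (illustrative): for `return f (b, a);' inside f (a, b),
   argvec[1] mentions the register of formal `a', which the store
   into the first formal would clobber; so argvec[1] is copied to
   a fresh register first.  */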
2970 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2972 int copy = 0;
2973 register int j;
2974 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2975 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2976 { copy = 1; break; }
2977 if (copy)
2978 argvec[i] = copy_to_reg (argvec[i]);
2981 /* Store the values of the actuals into the formals. */
2983 for (f = formals, a = actuals, i = 0; f;
2984 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2986 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2987 emit_move_insn (DECL_RTL (f), argvec[i]);
2988 else
2989 convert_move (DECL_RTL (f), argvec[i],
2990 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2993 free_temp_slots ();
2994 return 1;
2997 /* Generate the RTL code for entering a binding contour.
2998 The variables are declared one by one, by calls to `expand_decl'.
3000 EXIT_FLAG is nonzero if this construct should be visible to
3001 `exit_something'. */
3003 void
3004 expand_start_bindings (exit_flag)
3005 int exit_flag;
3007 struct nesting *thisblock = ALLOC_NESTING ();
3008 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3010 /* Make an entry on block_stack for the block we are entering. */
3012 thisblock->next = block_stack;
3013 thisblock->all = nesting_stack;
3014 thisblock->depth = ++nesting_depth;
3015 thisblock->data.block.stack_level = 0;
3016 thisblock->data.block.cleanups = 0;
3017 thisblock->data.block.n_function_calls = 0;
3018 thisblock->data.block.exception_region = 0;
3019 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3021 thisblock->data.block.conditional_code = 0;
3022 thisblock->data.block.last_unconditional_cleanup = note;
3023 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3025 if (block_stack
3026 && !(block_stack->data.block.cleanups == NULL_TREE
3027 && block_stack->data.block.outer_cleanups == NULL_TREE))
3028 thisblock->data.block.outer_cleanups
3029 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3030 block_stack->data.block.outer_cleanups);
3031 else
3032 thisblock->data.block.outer_cleanups = 0;
3033 thisblock->data.block.label_chain = 0;
3034 thisblock->data.block.innermost_stack_block = stack_block_stack;
3035 thisblock->data.block.first_insn = note;
3036 thisblock->data.block.block_start_count = ++current_block_start_count;
3037 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3038 block_stack = thisblock;
3039 nesting_stack = thisblock;
3041 /* Make a new level for allocating stack slots. */
3042 push_temp_slots ();
3045 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3046 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3047 expand_expr is made. After we end the region, we know that all
3048 space for all temporaries that were created by TARGET_EXPRs will be
3049 destroyed and their space freed for reuse. */
3051 void
3052 expand_start_target_temps ()
3054 /* This is so that even if the result is preserved, the space
3055 allocated will be freed, as we know that it is no longer in use. */
3056 push_temp_slots ();
3058 /* Start a new binding layer that will keep track of all cleanup
3059 actions to be performed. */
3060 expand_start_bindings (0);
3062 target_temp_slot_level = temp_slot_level;
3065 void
3066 expand_end_target_temps ()
3068 expand_end_bindings (NULL_TREE, 0, 0);
3070 /* This is so that even if the result is preserved, the space
3071 allocated will be freed, as we know that it is no longer in use. */
3072 pop_temp_slots ();
3075 /* Mark top block of block_stack as an implicit binding for an
3076 exception region. This is used to prevent infinite recursion when
3077 ending a binding with expand_end_bindings. It is only ever called
3078 by expand_eh_region_start, as that is the only way to create a
3079 block stack for an exception region. */
3081 void
3082 mark_block_as_eh_region ()
3084 block_stack->data.block.exception_region = 1;
3085 if (block_stack->next
3086 && block_stack->next->data.block.conditional_code)
3088 block_stack->data.block.conditional_code
3089 = block_stack->next->data.block.conditional_code;
3090 block_stack->data.block.last_unconditional_cleanup
3091 = block_stack->next->data.block.last_unconditional_cleanup;
3092 block_stack->data.block.cleanup_ptr
3093 = block_stack->next->data.block.cleanup_ptr;
3097 /* True if we are currently emitting insns in an area of output code
3098 that is controlled by a conditional expression. This is used by
3099 the cleanup handling code to generate conditional cleanup actions. */
3101 int
3102 conditional_context ()
3104 return block_stack && block_stack->data.block.conditional_code;
3107 /* Mark top block of block_stack as not for an implicit binding for an
3108 exception region. This is only ever done by expand_eh_region_end
3109 to let expand_end_bindings know that it is being called explicitly
3110 to end just the binding layer associated with the exception
3111 region; otherwise expand_end_bindings would try to end all
3112 implicit binding layers for exception regions, and then
3113 one normal binding layer. */
3115 void
3116 mark_block_as_not_eh_region ()
3118 block_stack->data.block.exception_region = 0;
3121 /* True if the top block of block_stack was marked as for an exception
3122 region by mark_block_as_eh_region. */
3124 int
3125 is_eh_region ()
3127 return block_stack && block_stack->data.block.exception_region;
3130 /* Given a pointer to a BLOCK node, save a pointer to the most recently
3131 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3132 BLOCK node. */
3134 void
3135 remember_end_note (block)
3136 register tree block;
3138 BLOCK_END_NOTE (block) = last_block_end_note;
3139 last_block_end_note = NULL_RTX;
3142 /* Emit a handler label for a nonlocal goto handler.
3143 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3145 static rtx
3146 expand_nl_handler_label (slot, before_insn)
3147 rtx slot, before_insn;
3149 rtx insns;
3150 rtx handler_label = gen_label_rtx ();
3152 /* Don't let jump_optimize delete the handler. */
3153 LABEL_PRESERVE_P (handler_label) = 1;
3155 start_sequence ();
3156 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3157 insns = get_insns ();
3158 end_sequence ();
3159 emit_insns_before (insns, before_insn);
3161 emit_label (handler_label);
3163 return handler_label;
3166 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3167 handler. */
3168 static void
3169 expand_nl_goto_receiver ()
3171 #ifdef HAVE_nonlocal_goto
3172 if (! HAVE_nonlocal_goto)
3173 #endif
3174 /* First adjust our frame pointer to its actual value. It was
3175 previously set to the start of the virtual area corresponding to
3176 the stacked variables when we branched here and now needs to be
3177 adjusted to the actual hardware fp value.
3179 Assignments to virtual registers are converted by
3180 instantiate_virtual_regs into the corresponding assignment
3181 to the underlying register (fp in this case) that makes
3182 the original assignment true.
3183 So the following insn will actually be
3184 decrementing fp by STARTING_FRAME_OFFSET. */
3185 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3187 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3188 if (fixed_regs[ARG_POINTER_REGNUM])
3190 #ifdef ELIMINABLE_REGS
3191 /* If the argument pointer can be eliminated in favor of the
3192 frame pointer, we don't need to restore it. We assume here
3193 that if such an elimination is present, it can always be used.
3194 This is the case on all known machines; if we don't make this
3195 assumption, we do unnecessary saving on many machines. */
3196 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3197 size_t i;
3199 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3200 if (elim_regs[i].from == ARG_POINTER_REGNUM
3201 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3202 break;
3204 if (i == sizeof elim_regs / sizeof elim_regs [0])
3205 #endif
3207 /* Now restore our arg pointer from the address at which it
3208 was saved in our stack frame.
3209 If there hasn't been space allocated for it yet, make
3210 some now. */
3211 if (arg_pointer_save_area == 0)
3212 arg_pointer_save_area
3213 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3214 emit_move_insn (virtual_incoming_args_rtx,
3215 /* We need a pseudo here, or else
3216 instantiate_virtual_regs_1 complains. */
3217 copy_to_reg (arg_pointer_save_area));
3220 #endif
3222 #ifdef HAVE_nonlocal_goto_receiver
3223 if (HAVE_nonlocal_goto_receiver)
3224 emit_insn (gen_nonlocal_goto_receiver ());
3225 #endif
3228 /* Make handlers for nonlocal gotos taking place in the function calls in
3229 block THISBLOCK. */
3231 static void
3232 expand_nl_goto_receivers (thisblock)
3233 struct nesting *thisblock;
3235 tree link;
3236 rtx afterward = gen_label_rtx ();
3237 rtx insns, slot;
3238 rtx label_list;
3239 int any_invalid;
3241 /* Record the handler address in the stack slot for that purpose,
3242 during this block, saving and restoring the outer value. */
3243 if (thisblock->next != 0)
3244 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3246 rtx save_receiver = gen_reg_rtx (Pmode);
3247 emit_move_insn (XEXP (slot, 0), save_receiver);
3249 start_sequence ();
3250 emit_move_insn (save_receiver, XEXP (slot, 0));
3251 insns = get_insns ();
3252 end_sequence ();
3253 emit_insns_before (insns, thisblock->data.block.first_insn);
3256 /* Jump around the handlers; they run only when specially invoked. */
3257 emit_jump (afterward);
3259 /* Make a separate handler for each label. */
3260 link = nonlocal_labels;
3261 slot = nonlocal_goto_handler_slots;
3262 label_list = NULL_RTX;
3263 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3264 /* Skip any labels we shouldn't be able to jump to from here;
3265 we generate one special handler for all of them below, which just
3266 calls abort. */
3267 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3269 rtx lab;
3270 lab = expand_nl_handler_label (XEXP (slot, 0),
3271 thisblock->data.block.first_insn);
3272 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3274 expand_nl_goto_receiver ();
3276 /* Jump to the "real" nonlocal label. */
3277 expand_goto (TREE_VALUE (link));
3280 /* A second pass over all nonlocal labels; this time we handle those
3281 we should not be able to jump to at this point. */
3282 link = nonlocal_labels;
3283 slot = nonlocal_goto_handler_slots;
3284 any_invalid = 0;
3285 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3286 if (DECL_TOO_LATE (TREE_VALUE (link)))
3288 rtx lab;
3289 lab = expand_nl_handler_label (XEXP (slot, 0),
3290 thisblock->data.block.first_insn);
3291 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3292 any_invalid = 1;
3295 if (any_invalid)
3297 expand_nl_goto_receiver ();
3298 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3299 VOIDmode, 0);
3300 emit_barrier ();
3303 nonlocal_goto_handler_labels = label_list;
3304 emit_label (afterward);
3307 /* Generate RTL code to terminate a binding contour.
3309 VARS is the chain of VAR_DECL nodes for the variables bound in this
3310 contour. There may actually be other nodes in this chain, but any
3311 nodes other than VAR_DECLS are ignored.
3313 MARK_ENDS is nonzero if we should put a note at the beginning
3314 and end of this binding contour.
3316 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3317 (That is true automatically if the contour has a saved stack level.) */
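/* A sketch of the pairing with expand_start_bindings for a brace
   block `{ int v = e; ... }' (hypothetical front-end code; DECLS
   stands for the chain of VAR_DECLs bound in the block):

     expand_start_bindings (0);
     expand_decl (v);
     expand_decl_init (v);
     ... expand the body ...
     expand_end_bindings (DECLS, 1, 0);  */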
3319 void
3320 expand_end_bindings (vars, mark_ends, dont_jump_in)
3321 tree vars;
3322 int mark_ends;
3323 int dont_jump_in;
3325 register struct nesting *thisblock;
3326 register tree decl;
3328 while (block_stack->data.block.exception_region)
3330 /* Because we don't need or want a new temporary level and
3331 because we didn't create one in expand_eh_region_start,
3332 create a fake one now to avoid removing one in
3333 expand_end_bindings. */
3334 push_temp_slots ();
3336 block_stack->data.block.exception_region = 0;
3338 expand_end_bindings (NULL_TREE, 0, 0);
3341 /* Since expand_eh_region_start does an expand_start_bindings, we
3342 have to first end all the bindings that were created by
3343 expand_eh_region_start. */
3345 thisblock = block_stack;
3347 if (warn_unused)
3348 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3349 if (TREE_CODE (decl) == VAR_DECL
3350 && ! TREE_USED (decl)
3351 && ! DECL_IN_SYSTEM_HEADER (decl)
3352 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3353 warning_with_decl (decl, "unused variable `%s'");
3355 if (thisblock->exit_label)
3357 do_pending_stack_adjust ();
3358 emit_label (thisblock->exit_label);
3361 /* If necessary, make handlers for nonlocal gotos taking
3362 place in the function calls in this block. */
3363 if (function_call_count != thisblock->data.block.n_function_calls
3364 && nonlocal_labels
3365 /* Make handler for outermost block
3366 if there were any nonlocal gotos to this function. */
3367 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3368 /* Make handler for inner block if it has something
3369 special to do when you jump out of it. */
3370 : (thisblock->data.block.cleanups != 0
3371 || thisblock->data.block.stack_level != 0)))
3372 expand_nl_goto_receivers (thisblock);
3374 /* Don't allow jumping into a block that has a stack level.
3375 Cleanups are allowed, though. */
3376 if (dont_jump_in
3377 || thisblock->data.block.stack_level != 0)
3379 struct label_chain *chain;
3381 /* Any labels in this block are no longer valid to go to.
3382 Mark them to cause an error message. */
3383 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3385 DECL_TOO_LATE (chain->label) = 1;
3386 /* If any goto without a fixup came to this label,
3387 that must be an error, because gotos without fixups
3388 come from outside all saved stack-levels. */
3389 if (TREE_ADDRESSABLE (chain->label))
3390 error_with_decl (chain->label,
3391 "label `%s' used before containing binding contour");
3395 /* Restore stack level in effect before the block
3396 (only if variable-size objects allocated). */
3397 /* Perform any cleanups associated with the block. */
3399 if (thisblock->data.block.stack_level != 0
3400 || thisblock->data.block.cleanups != 0)
3402 /* Only clean up here if this point can actually be reached. */
3403 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3405 /* Don't let cleanups affect ({...}) constructs. */
3406 int old_expr_stmts_for_value = expr_stmts_for_value;
3407 rtx old_last_expr_value = last_expr_value;
3408 tree old_last_expr_type = last_expr_type;
3409 expr_stmts_for_value = 0;
3411 /* Do the cleanups. */
3412 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3413 if (reachable)
3414 do_pending_stack_adjust ();
3416 expr_stmts_for_value = old_expr_stmts_for_value;
3417 last_expr_value = old_last_expr_value;
3418 last_expr_type = old_last_expr_type;
3420 /* Restore the stack level. */
3422 if (reachable && thisblock->data.block.stack_level != 0)
3424 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3425 thisblock->data.block.stack_level, NULL_RTX);
3426 if (nonlocal_goto_handler_slots != 0)
3427 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3428 NULL_RTX);
3431 /* Any gotos out of this block must also do these things.
3432 Also report any gotos with fixups that came to labels in this
3433 level. */
3434 fixup_gotos (thisblock,
3435 thisblock->data.block.stack_level,
3436 thisblock->data.block.cleanups,
3437 thisblock->data.block.first_insn,
3438 dont_jump_in);
3441 /* Mark the beginning and end of the scope if requested.
3442 We do this now, after running cleanups on the variables
3443 just going out of scope, so they are in scope for their cleanups. */
3445 if (mark_ends)
3446 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3447 else
3448 /* Get rid of the beginning-mark if we don't make an end-mark. */
3449 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3451 /* If doing stupid register allocation, make sure lives of all
3452 register variables declared here extend thru end of scope. */
3454 if (obey_regdecls)
3455 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3456 if (TREE_CODE (decl) == VAR_DECL && DECL_RTL (decl))
3457 use_variable (DECL_RTL (decl));
3459 /* Restore the temporary level of TARGET_EXPRs. */
3460 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3462 /* Restore block_stack level for containing block. */
3464 stack_block_stack = thisblock->data.block.innermost_stack_block;
3465 POPSTACK (block_stack);
3467 /* Pop the stack slot nesting and free any slots at this level. */
3468 pop_temp_slots ();
3471 /* Generate RTL for the automatic variable declaration DECL.
3472 (Other kinds of declarations are simply ignored if seen here.) */
3474 void
3475 expand_decl (decl)
3476 register tree decl;
3478 struct nesting *thisblock;
3479 tree type;
3481 type = TREE_TYPE (decl);
3483 /* Only automatic variables need any expansion done.
3484 Static and external variables, and external functions,
3485 will be handled by `assemble_variable' (called from finish_decl).
3486 TYPE_DECL and CONST_DECL require nothing.
3487 PARM_DECLs are handled in `assign_parms'. */
3489 if (TREE_CODE (decl) != VAR_DECL)
3490 return;
3491 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3492 return;
3494 thisblock = block_stack;
3496 /* Create the RTL representation for the variable. */
3498 if (type == error_mark_node)
3499 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3500 else if (DECL_SIZE (decl) == 0)
3501 /* Variable with incomplete type. */
3503 if (DECL_INITIAL (decl) == 0)
3504 /* Error message was already done; now avoid a crash. */
3505 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3506 else
3507 /* An initializer is going to decide the size of this array.
3508 Until we know the size, represent its address with a reg. */
3509 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3510 MEM_SET_IN_STRUCT_P (DECL_RTL (decl), AGGREGATE_TYPE_P (type));
3512 else if (DECL_MODE (decl) != BLKmode
3513 /* If -ffloat-store, don't put explicit float vars
3514 into regs. */
3515 && !(flag_float_store
3516 && TREE_CODE (type) == REAL_TYPE)
3517 && ! TREE_THIS_VOLATILE (decl)
3518 && ! TREE_ADDRESSABLE (decl)
3519 && (DECL_REGISTER (decl) || ! obey_regdecls)
3520 /* if -fcheck-memory-usage, check all variables. */
3521 && ! current_function_check_memory_usage)
3523 /* Automatic variable that can go in a register. */
3524 int unsignedp = TREE_UNSIGNED (type);
3525 enum machine_mode reg_mode
3526 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3528 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3529 mark_user_reg (DECL_RTL (decl));
3531 if (POINTER_TYPE_P (type))
3532 mark_reg_pointer (DECL_RTL (decl),
3533 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3534 / BITS_PER_UNIT));
3537 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3538 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3539 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3540 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3541 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
3543 /* Variable of fixed size that goes on the stack. */
3544 rtx oldaddr = 0;
3545 rtx addr;
3547 /* If we previously made RTL for this decl, it must be an array
3548 whose size was determined by the initializer.
3549 The old address was a register; set that register now
3550 to the proper address. */
3551 if (DECL_RTL (decl) != 0)
3553 if (GET_CODE (DECL_RTL (decl)) != MEM
3554 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3555 abort ();
3556 oldaddr = XEXP (DECL_RTL (decl), 0);
3559 DECL_RTL (decl) = assign_temp (TREE_TYPE (decl), 1, 1, 1);
3560 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3561 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3563 /* Set alignment we actually gave this decl. */
3564 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3565 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3567 if (oldaddr)
3569 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3570 if (addr != oldaddr)
3571 emit_move_insn (oldaddr, addr);
3574 /* If this is a memory ref that contains aggregate components,
3575 mark it as such for cse and loop optimize. */
3576 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3577 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3578 #if 0
3579 /* If this is in memory because of -ffloat-store,
3580 set the volatile bit, to prevent optimizations from
3581 undoing the effects. */
3582 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3583 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3584 #endif
3586 MEM_ALIAS_SET (DECL_RTL (decl)) = get_alias_set (decl);
3588 else
3589 /* Dynamic-size object: must push space on the stack. */
3591 rtx address, size;
3593 /* Record the stack pointer on entry to the block, if we have
3594 not already done so. */
3595 if (thisblock->data.block.stack_level == 0)
3597 do_pending_stack_adjust ();
3598 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3599 &thisblock->data.block.stack_level,
3600 thisblock->data.block.first_insn);
3601 stack_block_stack = thisblock;
3604 /* Compute the variable's size, in bytes. */
3605 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3606 DECL_SIZE (decl),
3607 size_int (BITS_PER_UNIT)),
3608 NULL_RTX, VOIDmode, 0);
3609 free_temp_slots ();
3611 /* Allocate space on the stack for the variable. Note that
3612 DECL_ALIGN says how the variable is to be aligned and we
3613 cannot use it to conclude anything about the alignment of
3614 the size. */
3615 address = allocate_dynamic_stack_space (size, NULL_RTX,
3616 TYPE_ALIGN (TREE_TYPE (decl)));
3618 /* Reference the variable indirectly through that rtx. */
3619 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3621 /* If this is a memory ref that contains aggregate components,
3622 mark it as such for cse and loop optimize. */
3623 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3624 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3626 /* Indicate the alignment we actually gave this variable. */
3627 #ifdef STACK_BOUNDARY
3628 DECL_ALIGN (decl) = STACK_BOUNDARY;
3629 #else
3630 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3631 #endif
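/* Illustrative example (not from the original sources): a GNU C
   variable-length array such as

       void f (int n) { char buf[n]; }

   takes this arm: the stack pointer is saved on entry to the block so
   the space can be reclaimed on exit, and the address returned by
   allocate_dynamic_stack_space is wrapped in the MEM that becomes
   DECL_RTL (buf).  */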
3634 if (TREE_THIS_VOLATILE (decl))
3635 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3636 #if 0 /* A variable is not necessarily unchanging
3637 just because it is const. RTX_UNCHANGING_P
3638 means no change in the function,
3639 not merely no change in the variable's scope.
3640 It is correct to set RTX_UNCHANGING_P if the variable's scope
3641 is the whole function. There's no convenient way to test that. */
3642 if (TREE_READONLY (decl))
3643 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3644 #endif
3646 /* If doing stupid register allocation, make sure life of any
3647 register variable starts here, at the start of its scope. */
3649 if (obey_regdecls)
3650 use_variable (DECL_RTL (decl));
3655 /* Emit code to perform the initialization of a declaration DECL. */
3657 void
3658 expand_decl_init (decl)
3659 tree decl;
3661 int was_used = TREE_USED (decl);
3663 /* If this is a CONST_DECL, we don't have to generate any code, but
3664 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3665 to be set while in the obstack containing the constant. If we don't
3666 do this, we can lose if we have functions nested three deep and the middle
3667 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3668 the innermost function is the first to expand that STRING_CST. */
3669 if (TREE_CODE (decl) == CONST_DECL)
3671 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3672 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3673 EXPAND_INITIALIZER);
3674 return;
3677 if (TREE_STATIC (decl))
3678 return;
3680 /* Compute and store the initial value now. */
3682 if (DECL_INITIAL (decl) == error_mark_node)
3684 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3686 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3687 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3688 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3689 0, 0);
3690 emit_queue ();
3692 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3694 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3695 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3696 emit_queue ();
3699 /* Don't let the initialization count as "using" the variable. */
3700 TREE_USED (decl) = was_used;
3702 /* Free any temporaries we made while initializing the decl. */
3703 preserve_temp_slots (NULL_RTX);
3704 free_temp_slots ();
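/* Illustrative example (not from the original sources): for

       int i = n + 1;

   this routine emits a line note for the declaration and expands the
   store `i = n + 1' via expand_assignment; TREE_USED is then restored
   so that the initialization alone does not mark `i' as used.  */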
3707 /* CLEANUP is an expression to be executed at exit from this binding contour;
3708 for example, in C++, it might call the destructor for this variable.
3710 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3711 CLEANUP multiple times, and have the correct semantics. This
3712 happens in exception handling, and for gotos, returns, and breaks
3713 that leave the current scope.
3715 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3716 that is not associated with any particular variable. */
3719 expand_decl_cleanup (decl, cleanup)
3720 tree decl, cleanup;
3722 struct nesting *thisblock;
3724 /* Error if we are not in any block. */
3725 if (current_function == 0 || block_stack == 0)
3726 return 0;
3728 thisblock = block_stack;
3730 /* Record the cleanup if there is one. */
3732 if (cleanup != 0)
3734 tree t;
3735 rtx seq;
3736 tree *cleanups = &thisblock->data.block.cleanups;
3737 int cond_context = conditional_context ();
3739 if (cond_context)
3741 rtx flag = gen_reg_rtx (word_mode);
3742 rtx set_flag_0;
3743 tree cond;
3745 start_sequence ();
3746 emit_move_insn (flag, const0_rtx);
3747 set_flag_0 = get_insns ();
3748 end_sequence ();
3750 thisblock->data.block.last_unconditional_cleanup
3751 = emit_insns_after (set_flag_0,
3752 thisblock->data.block.last_unconditional_cleanup);
3754 emit_move_insn (flag, const1_rtx);
3756 /* All cleanups must be on the function_obstack. */
3757 push_obstacks_nochange ();
3758 resume_temporary_allocation ();
3760 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3761 DECL_RTL (cond) = flag;
3763 /* Conditionalize the cleanup. */
3764 cleanup = build (COND_EXPR, void_type_node,
3765 truthvalue_conversion (cond),
3766 cleanup, integer_zero_node);
3767 cleanup = fold (cleanup);
3769 pop_obstacks ();
3771 cleanups = thisblock->data.block.cleanup_ptr;
3774 /* All cleanups must be on the function_obstack. */
3775 push_obstacks_nochange ();
3776 resume_temporary_allocation ();
3777 cleanup = unsave_expr (cleanup);
3778 pop_obstacks ();
3780 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
3782 if (! cond_context)
3783 /* If this block has a cleanup, it belongs in stack_block_stack. */
3784 stack_block_stack = thisblock;
3786 if (cond_context)
3788 start_sequence ();
3791 /* If this was optimized so that there is no exception region for the
3792 cleanup, then mark the TREE_LIST node, so that we can later tell
3793 if we need to call expand_eh_region_end. */
3794 if (! using_eh_for_cleanups_p
3795 || expand_eh_region_start_tree (decl, cleanup))
3796 TREE_ADDRESSABLE (t) = 1;
3797 /* If that started a new EH region, we're in a new block. */
3798 thisblock = block_stack;
3800 if (cond_context)
3802 seq = get_insns ();
3803 end_sequence ();
3804 if (seq)
3805 thisblock->data.block.last_unconditional_cleanup
3806 = emit_insns_after (seq,
3807 thisblock->data.block.last_unconditional_cleanup);
3809 else
3811 thisblock->data.block.last_unconditional_cleanup
3812 = get_last_insn ();
3813 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3816 return 1;
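/* Illustrative sketch of the conditional-cleanup technique above (not
   from the original sources).  Source like the C++ fragment

       cond ? T () : 0

   registers T's destructor from within a conditional context.  The
   effect is as if the cleanup had been rewritten to

       flag = 0;               (at the last unconditional point)
       ...
       flag = 1;               (where the temporary is constructed)
       ...
       if (flag) destroy (&t); (the COND_EXPR-wrapped cleanup)

   so the destructor runs only on paths that built the value.  */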
3819 /* Like expand_decl_cleanup, but suppress generating an exception handler
3820 to perform the cleanup. */
3823 expand_decl_cleanup_no_eh (decl, cleanup)
3824 tree decl, cleanup;
3826 int save_eh = using_eh_for_cleanups_p;
3827 int result;
3829 using_eh_for_cleanups_p = 0;
3830 result = expand_decl_cleanup (decl, cleanup);
3831 using_eh_for_cleanups_p = save_eh;
3833 return result;
3836 /* Arrange for the top element of the dynamic cleanup chain to be
3837 popped if we exit the current binding contour. DECL is the
3838 associated declaration, if any, otherwise NULL_TREE. If the
3839 current contour is left via an exception, then __sjthrow will pop
3840 the top element off the dynamic cleanup chain. The code that
3841 avoids doing the action we push into the cleanup chain in the
3842 exceptional case is contained in expand_cleanups.
3844 This routine is only used by expand_eh_region_start, and that is
3845 the only way in which an exception region should be started. This
3846 routine is only used when using the setjmp/longjmp codegen method
3847 for exception handling. */
3850 expand_dcc_cleanup (decl)
3851 tree decl;
3853 struct nesting *thisblock;
3854 tree cleanup;
3856 /* Error if we are not in any block. */
3857 if (current_function == 0 || block_stack == 0)
3858 return 0;
3859 thisblock = block_stack;
3861 /* Record the cleanup for the dynamic handler chain. */
3863 /* All cleanups must be on the function_obstack. */
3864 push_obstacks_nochange ();
3865 resume_temporary_allocation ();
3866 cleanup = make_node (POPDCC_EXPR);
3867 pop_obstacks ();
3869 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3870 thisblock->data.block.cleanups
3871 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3873 /* If this block has a cleanup, it belongs in stack_block_stack. */
3874 stack_block_stack = thisblock;
3875 return 1;
3878 /* Arrange for the top element of the dynamic handler chain to be
3879 popped if we exit the current binding contour. DECL is the
3880 associated declaration, if any, otherwise NULL_TREE. If the current
3881 contour is left via an exception, then __sjthrow will pop the top
3882 element off the dynamic handler chain. The code that avoids doing
3883 the action we push into the handler chain in the exceptional case
3884 is contained in expand_cleanups.
3886 This routine is only used by expand_eh_region_start, and that is
3887 the only way in which an exception region should be started. This
3888 routine is only used when using the setjmp/longjmp codegen method
3889 for exception handling. */
3892 expand_dhc_cleanup (decl)
3893 tree decl;
3895 struct nesting *thisblock;
3896 tree cleanup;
3898 /* Error if we are not in any block. */
3899 if (current_function == 0 || block_stack == 0)
3900 return 0;
3901 thisblock = block_stack;
3903 /* Record the cleanup for the dynamic handler chain. */
3905 /* All cleanups must be on the function_obstack. */
3906 push_obstacks_nochange ();
3907 resume_temporary_allocation ();
3908 cleanup = make_node (POPDHC_EXPR);
3909 pop_obstacks ();
3911 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3912 thisblock->data.block.cleanups
3913 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3915 /* If this block has a cleanup, it belongs in stack_block_stack. */
3916 stack_block_stack = thisblock;
3917 return 1;
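/* Illustrative note on the two routines above (a sketch under the
   setjmp/longjmp codegen method; the struct name is hypothetical):
   each handler region keeps a record on a per-function chain, roughly

       struct dhc { struct dhc *next; jmp_buf buf; };

   The POPDHC_EXPR/POPDCC_EXPR cleanups recorded here pop that chain
   on normal exit from the contour, while __sjthrow pops it when the
   contour is left via an exception.  */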
3920 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3921 DECL_ELTS is the list of elements that belong to DECL's type.
3922 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3924 void
3925 expand_anon_union_decl (decl, cleanup, decl_elts)
3926 tree decl, cleanup, decl_elts;
3928 struct nesting *thisblock = current_function == 0 ? 0 : block_stack;
3929 rtx x;
3931 expand_decl (decl);
3932 expand_decl_cleanup (decl, cleanup);
3933 x = DECL_RTL (decl);
3935 while (decl_elts)
3937 tree decl_elt = TREE_VALUE (decl_elts);
3938 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3939 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3941 /* Propagate the union's alignment to the elements. */
3942 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3944 /* If the element has BLKmode and the union doesn't, the union is
3945 aligned such that the element doesn't need to have BLKmode, so
3946 change the element's mode to the appropriate one for its size. */
3947 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3948 DECL_MODE (decl_elt) = mode
3949 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3950 MODE_INT, 1);
3952 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3953 instead create a new MEM rtx with the proper mode. */
3954 if (GET_CODE (x) == MEM)
3956 if (mode == GET_MODE (x))
3957 DECL_RTL (decl_elt) = x;
3958 else
3960 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
3961 MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
3962 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3965 else if (GET_CODE (x) == REG)
3967 if (mode == GET_MODE (x))
3968 DECL_RTL (decl_elt) = x;
3969 else
3970 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
3972 else
3973 abort ();
3975 /* Record the cleanup if there is one. */
3977 if (cleanup != 0)
3978 thisblock->data.block.cleanups
3979 = temp_tree_cons (decl_elt, cleanup_elt,
3980 thisblock->data.block.cleanups);
3982 decl_elts = TREE_CHAIN (decl_elts);
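/* Illustrative example (not from the original sources): given the
   anonymous union

       union { int i; float f; };

   the union itself gets one rtx, and each member's DECL_RTL becomes
   that same register or memory reinterpreted in the member's own mode:
   a SUBREG when the union lives in a register, a freshly built MEM at
   the same address when it lives in memory.  */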
3986 /* Expand a list of cleanups LIST.
3987 Elements may be expressions or may be nested lists.
3989 If DONT_DO is nonnull, then any list-element
3990 whose TREE_PURPOSE matches DONT_DO is omitted.
3991 This is sometimes used to avoid a cleanup associated with
3992 a value that is being returned out of the scope.
3994 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
3995 goto and handle protection regions specially in that case.
3997 If REACHABLE, we emit code, otherwise just inform the exception handling
3998 code about this finalization. */
4000 static void
4001 expand_cleanups (list, dont_do, in_fixup, reachable)
4002 tree list;
4003 tree dont_do;
4004 int in_fixup;
4005 int reachable;
4007 tree tail;
4008 for (tail = list; tail; tail = TREE_CHAIN (tail))
4009 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4011 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4012 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4013 else
4015 if (! in_fixup)
4017 tree cleanup = TREE_VALUE (tail);
4019 /* See expand_d{h,c}c_cleanup for why we avoid this. */
4020 if (TREE_CODE (cleanup) != POPDHC_EXPR
4021 && TREE_CODE (cleanup) != POPDCC_EXPR
4022 /* See expand_eh_region_start_tree for this case. */
4023 && ! TREE_ADDRESSABLE (tail))
4025 cleanup = protect_with_terminate (cleanup);
4026 expand_eh_region_end (cleanup);
4030 if (reachable)
4032 /* Cleanups may be run multiple times. For example,
4033 when exiting a binding contour, we expand the
4034 cleanups associated with that contour. When a goto
4035 within that binding contour has a target outside that
4036 contour, it will expand all cleanups from its scope to
4037 the target. Though the cleanups are expanded multiple
4038 times, the control paths are non-overlapping so the
4039 cleanups will not be executed twice. */
4041 /* We may need to protect fixups with rethrow regions. */
4042 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
4044 if (protect)
4045 expand_fixup_region_start ();
4047 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4048 if (protect)
4049 expand_fixup_region_end (TREE_VALUE (tail));
4050 free_temp_slots ();
4056 /* Mark the context we are emitting RTL for as a conditional
4057 context, so that any cleanup actions we register with
4058 expand_decl_cleanup will be properly conditionalized when those
4059 cleanup actions are later performed. Must be called before any
4060 expression (tree) is expanded that is within a conditional context. */
4062 void
4063 start_cleanup_deferral ()
4065 /* block_stack can be NULL if we are inside the parameter list. It is
4066 OK to do nothing, because cleanups aren't possible here. */
4067 if (block_stack)
4068 ++block_stack->data.block.conditional_code;
4071 /* Mark the end of a conditional region of code. Because cleanup
4072 deferrals may be nested, we may still be in a conditional region
4073 after we end the currently deferred cleanups; only after we end all
4074 deferred cleanups are we back in unconditional code. */
4076 void
4077 end_cleanup_deferral ()
4079 /* block_stack can be NULL if we are inside the parameter list. It is
4080 OK to do nothing, because cleanups aren't possible here. */
4081 if (block_stack)
4082 --block_stack->data.block.conditional_code;
4085 /* Move all cleanups from the current block_stack
4086 to the containing block_stack, where they are assumed to
4087 have been created. If anything can cause a temporary to
4088 be created, but not expanded for more than one level of
4089 block_stacks, then this code will have to change. */
4091 void
4092 move_cleanups_up ()
4094 struct nesting *block = block_stack;
4095 struct nesting *outer = block->next;
4097 outer->data.block.cleanups
4098 = chainon (block->data.block.cleanups,
4099 outer->data.block.cleanups);
4100 block->data.block.cleanups = 0;
4103 tree
4104 last_cleanup_this_contour ()
4106 if (block_stack == 0)
4107 return 0;
4109 return block_stack->data.block.cleanups;
4112 /* Return 1 if there are any pending cleanups at this point.
4113 If THIS_CONTOUR is nonzero, check the current contour as well.
4114 Otherwise, look only at the contours that enclose this one. */
4117 any_pending_cleanups (this_contour)
4118 int this_contour;
4120 struct nesting *block;
4122 if (block_stack == 0)
4123 return 0;
4125 if (this_contour && block_stack->data.block.cleanups != NULL)
4126 return 1;
4127 if (block_stack->data.block.cleanups == 0
4128 && block_stack->data.block.outer_cleanups == 0)
4129 return 0;
4131 for (block = block_stack->next; block; block = block->next)
4132 if (block->data.block.cleanups != 0)
4133 return 1;
4135 return 0;
4138 /* Enter a case (Pascal) or switch (C) statement.
4139 Push a block onto case_stack and nesting_stack
4140 to accumulate the case-labels that are seen
4141 and to record the labels generated for the statement.
4143 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4144 Otherwise, this construct is transparent for `exit_something'.
4146 EXPR is the index-expression to be dispatched on.
4147 TYPE is its nominal type. We could simply convert EXPR to this type,
4148 but instead we take shortcuts. */
4150 void
4151 expand_start_case (exit_flag, expr, type, printname)
4152 int exit_flag;
4153 tree expr;
4154 tree type;
4155 const char *printname;
4157 register struct nesting *thiscase = ALLOC_NESTING ();
4159 /* Make an entry on case_stack for the case we are entering. */
4161 thiscase->next = case_stack;
4162 thiscase->all = nesting_stack;
4163 thiscase->depth = ++nesting_depth;
4164 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4165 thiscase->data.case_stmt.case_list = 0;
4166 thiscase->data.case_stmt.index_expr = expr;
4167 thiscase->data.case_stmt.nominal_type = type;
4168 thiscase->data.case_stmt.default_label = 0;
4169 thiscase->data.case_stmt.num_ranges = 0;
4170 thiscase->data.case_stmt.printname = printname;
4171 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4172 case_stack = thiscase;
4173 nesting_stack = thiscase;
4175 do_pending_stack_adjust ();
4177 /* Make sure case_stmt.start points to something that won't
4178 need any transformation before expand_end_case. */
4179 if (GET_CODE (get_last_insn ()) != NOTE)
4180 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4182 thiscase->data.case_stmt.start = get_last_insn ();
4184 start_cleanup_deferral ();
4188 /* Start a "dummy case statement" within which case labels are invalid
4189 and are not connected to any larger real case statement.
4190 This can be used if you don't want to let a case statement jump
4191 into the middle of certain kinds of constructs. */
4193 void
4194 expand_start_case_dummy ()
4196 register struct nesting *thiscase = ALLOC_NESTING ();
4198 /* Make an entry on case_stack for the dummy. */
4200 thiscase->next = case_stack;
4201 thiscase->all = nesting_stack;
4202 thiscase->depth = ++nesting_depth;
4203 thiscase->exit_label = 0;
4204 thiscase->data.case_stmt.case_list = 0;
4205 thiscase->data.case_stmt.start = 0;
4206 thiscase->data.case_stmt.nominal_type = 0;
4207 thiscase->data.case_stmt.default_label = 0;
4208 thiscase->data.case_stmt.num_ranges = 0;
4209 case_stack = thiscase;
4210 nesting_stack = thiscase;
4211 start_cleanup_deferral ();
4214 /* End a dummy case statement. */
4216 void
4217 expand_end_case_dummy ()
4219 end_cleanup_deferral ();
4220 POPSTACK (case_stack);
4223 /* Return the data type of the index-expression
4224 of the innermost case statement, or null if none. */
4226 tree
4227 case_index_expr_type ()
4229 if (case_stack)
4230 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4231 return 0;
4234 static void
4235 check_seenlabel ()
4237 /* If this is the first label, warn if any insns have been emitted. */
4238 if (case_stack->data.case_stmt.line_number_status >= 0)
4240 rtx insn;
4242 restore_line_number_status
4243 (case_stack->data.case_stmt.line_number_status);
4244 case_stack->data.case_stmt.line_number_status = -1;
4246 for (insn = case_stack->data.case_stmt.start;
4247 insn;
4248 insn = NEXT_INSN (insn))
4250 if (GET_CODE (insn) == CODE_LABEL)
4251 break;
4252 if (GET_CODE (insn) != NOTE
4253 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4256 insn = PREV_INSN (insn);
4257 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4259 /* If insn is zero, then there must have been a syntax error. */
4260 if (insn)
4261 warning_with_file_and_line (NOTE_SOURCE_FILE(insn),
4262 NOTE_LINE_NUMBER(insn),
4263 "unreachable code at beginning of %s",
4264 case_stack->data.case_stmt.printname);
4265 break;
4271 /* Accumulate one case or default label inside a case or switch statement.
4272 VALUE is the value of the case (a null pointer, for a default label).
4273 The function CONVERTER, when applied to arguments T and V,
4274 converts the value V to the type T.
4276 If not currently inside a case or switch statement, return 1 and do
4277 nothing. The caller will print a language-specific error message.
4278 If VALUE is a duplicate or overlaps, return 2 and do nothing
4279 except store the (first) duplicate node in *DUPLICATE.
4280 If VALUE is out of range, return 3 and do nothing.
4281 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4282 Return 0 on success.
4284 Extended to handle range statements. */
4287 pushcase (value, converter, label, duplicate)
4288 register tree value;
4289 tree (*converter) PROTO((tree, tree));
4290 register tree label;
4291 tree *duplicate;
4293 tree index_type;
4294 tree nominal_type;
4296 /* Fail if not inside a real case statement. */
4297 if (! (case_stack && case_stack->data.case_stmt.start))
4298 return 1;
4300 if (stack_block_stack
4301 && stack_block_stack->depth > case_stack->depth)
4302 return 5;
4304 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4305 nominal_type = case_stack->data.case_stmt.nominal_type;
4307 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4308 if (index_type == error_mark_node)
4309 return 0;
4311 /* Convert VALUE to the type in which the comparisons are nominally done. */
4312 if (value != 0)
4313 value = (*converter) (nominal_type, value);
4315 check_seenlabel ();
4317 /* Fail if this value is out of range for the actual type of the index
4318 (which may be narrower than NOMINAL_TYPE). */
4319 if (value != 0 && ! int_fits_type_p (value, index_type))
4320 return 3;
4322 /* Fail if this is a duplicate or overlaps another entry. */
4323 if (value == 0)
4325 if (case_stack->data.case_stmt.default_label != 0)
4327 *duplicate = case_stack->data.case_stmt.default_label;
4328 return 2;
4330 case_stack->data.case_stmt.default_label = label;
4332 else
4333 return add_case_node (value, value, label, duplicate);
4335 expand_label (label);
4336 return 0;
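/* Illustrative sketch of a front-end caller (hypothetical code, not
   taken from any actual front end; `convert' is the usual tree
   conversion routine):

       tree dup;
       switch (pushcase (value, convert, label, &dup))
         {
         case 1: error ("case label not within a switch statement"); break;
         case 2: error ("duplicate case value"); break;
         case 3: error ("case value out of range"); break;
         case 5: error ("case label within scope of cleanup or variable array"); break;
         }
*/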
4339 /* Like pushcase but this case applies to all values between VALUE1 and
4340 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4341 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4342 starts at VALUE1 and ends at the highest value of the index type.
4343 If both are NULL, this case applies to all values.
4345 The return value is the same as that of pushcase but there is one
4346 additional error code: 4 means the specified range was empty. */
4349 pushcase_range (value1, value2, converter, label, duplicate)
4350 register tree value1, value2;
4351 tree (*converter) PROTO((tree, tree));
4352 register tree label;
4353 tree *duplicate;
4355 tree index_type;
4356 tree nominal_type;
4358 /* Fail if not inside a real case statement. */
4359 if (! (case_stack && case_stack->data.case_stmt.start))
4360 return 1;
4362 if (stack_block_stack
4363 && stack_block_stack->depth > case_stack->depth)
4364 return 5;
4366 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4367 nominal_type = case_stack->data.case_stmt.nominal_type;
4369 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4370 if (index_type == error_mark_node)
4371 return 0;
4373 check_seenlabel ();
4375 /* Convert VALUEs to the type in which the comparisons are nominally done
4376 and replace any unspecified value with the corresponding bound. */
4377 if (value1 == 0)
4378 value1 = TYPE_MIN_VALUE (index_type);
4379 if (value2 == 0)
4380 value2 = TYPE_MAX_VALUE (index_type);
4382 /* Fail if the range is empty. Do this before any conversion since
4383 we want to allow out-of-range empty ranges. */
4384 if (value2 && tree_int_cst_lt (value2, value1))
4385 return 4;
4387 value1 = (*converter) (nominal_type, value1);
4389 /* If the max was unbounded, use the max of the nominal_type we are
4390 converting to. Do this after the < check above to suppress false
4391 positives. */
4392 if (!value2)
4393 value2 = TYPE_MAX_VALUE (nominal_type);
4394 value2 = (*converter) (nominal_type, value2);
4396 /* Fail if these values are out of range. */
4397 if (TREE_CONSTANT_OVERFLOW (value1)
4398 || ! int_fits_type_p (value1, index_type))
4399 return 3;
4401 if (TREE_CONSTANT_OVERFLOW (value2)
4402 || ! int_fits_type_p (value2, index_type))
4403 return 3;
4405 return add_case_node (value1, value2, label, duplicate);
4408 /* Do the actual insertion of a case label for pushcase and pushcase_range
4409 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4410 slowdown for large switch statements. */
4412 static int
4413 add_case_node (low, high, label, duplicate)
4414 tree low, high;
4415 tree label;
4416 tree *duplicate;
4418 struct case_node *p, **q, *r;
4420 q = &case_stack->data.case_stmt.case_list;
4421 p = *q;
4423 while ((r = *q))
4425 p = r;
4427 /* Keep going past elements distinctly greater than HIGH. */
4428 if (tree_int_cst_lt (high, p->low))
4429 q = &p->left;
4431 /* or distinctly less than LOW. */
4432 else if (tree_int_cst_lt (p->high, low))
4433 q = &p->right;
4435 else
4437 /* We have an overlap; this is an error. */
4438 *duplicate = p->code_label;
4439 return 2;
4443 /* Add this label to the chain, and succeed.
4444 Copy LOW, HIGH so they are on temporary rather than momentary
4445 obstack and will thus survive till the end of the case statement. */
4447 r = (struct case_node *) oballoc (sizeof (struct case_node));
4448 r->low = copy_node (low);
4450 /* If the bounds are equal, turn this into the one-value case. */
4452 if (tree_int_cst_equal (low, high))
4453 r->high = r->low;
4454 else
4456 r->high = copy_node (high);
4457 case_stack->data.case_stmt.num_ranges++;
4460 r->code_label = label;
4461 expand_label (label);
4463 *q = r;
4464 r->parent = p;
4465 r->left = 0;
4466 r->right = 0;
4467 r->balance = 0;
4469 while (p)
4471 struct case_node *s;
4473 if (r == p->left)
4475 int b;
4477 if (! (b = p->balance))
4478 /* Growth propagation from left side. */
4479 p->balance = -1;
4480 else if (b < 0)
4482 if (r->balance < 0)
4484 /* R-Rotation */
4485 if ((p->left = s = r->right))
4486 s->parent = p;
4488 r->right = p;
4489 p->balance = 0;
4490 r->balance = 0;
4491 s = p->parent;
4492 p->parent = r;
4494 if ((r->parent = s))
4496 if (s->left == p)
4497 s->left = r;
4498 else
4499 s->right = r;
4501 else
4502 case_stack->data.case_stmt.case_list = r;
4504 else
4505 /* r->balance == +1 */
4507 /* LR-Rotation */
4509 int b2;
4510 struct case_node *t = r->right;
4512 if ((p->left = s = t->right))
4513 s->parent = p;
4515 t->right = p;
4516 if ((r->right = s = t->left))
4517 s->parent = r;
4519 t->left = r;
4520 b = t->balance;
4521 b2 = b < 0;
4522 p->balance = b2;
4523 b2 = -b2 - b;
4524 r->balance = b2;
4525 t->balance = 0;
4526 s = p->parent;
4527 p->parent = t;
4528 r->parent = t;
4530 if ((t->parent = s))
4532 if (s->left == p)
4533 s->left = t;
4534 else
4535 s->right = t;
4537 else
4538 case_stack->data.case_stmt.case_list = t;
4540 break;
4543 else
4545 /* p->balance == +1; growth of left side balances the node. */
4546 p->balance = 0;
4547 break;
4550 else
4551 /* r == p->right */
4553 int b;
4555 if (! (b = p->balance))
4556 /* Growth propagation from right side. */
4557 p->balance++;
4558 else if (b > 0)
4560 if (r->balance > 0)
4562 /* L-Rotation */
4564 if ((p->right = s = r->left))
4565 s->parent = p;
4567 r->left = p;
4568 p->balance = 0;
4569 r->balance = 0;
4570 s = p->parent;
4571 p->parent = r;
4572 if ((r->parent = s))
4574 if (s->left == p)
4575 s->left = r;
4576 else
4577 s->right = r;
4580 else
4581 case_stack->data.case_stmt.case_list = r;
4584 else
4585 /* r->balance == -1 */
4587 /* RL-Rotation */
4588 int b2;
4589 struct case_node *t = r->left;
4591 if ((p->right = s = t->left))
4592 s->parent = p;
4594 t->left = p;
4596 if ((r->left = s = t->right))
4597 s->parent = r;
4599 t->right = r;
4600 b = t->balance;
4601 b2 = b < 0;
4602 r->balance = b2;
4603 b2 = -b2 - b;
4604 p->balance = b2;
4605 t->balance = 0;
4606 s = p->parent;
4607 p->parent = t;
4608 r->parent = t;
4610 if ((t->parent = s))
4612 if (s->left == p)
4613 s->left = t;
4614 else
4615 s->right = t;
4618 else
4619 case_stack->data.case_stmt.case_list = t;
4621 break;
4623 else
4625 /* p->balance == -1; growth of right side balances the node. */
4626 p->balance = 0;
4627 break;
4631 r = p;
4632 p = p->parent;
4635 return 0;
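/* Illustrative note on the rebalancing above (a sketch, not from the
   original sources): inserting ascending case values 1, 2, 3 first
   links 2 as the right child of 1; adding 3 tips the balance, and the
   L-rotation promotes 2 to the root with 1 and 3 as children.  This
   keeps insertions into add_case_node at O(log N) for an N-label
   switch instead of O(N) for a sorted list.  */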
4639 /* Returns the number of possible values of TYPE.
4640 Returns -1 if the number is unknown or variable.
4641 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4642 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4643 do not increase monotonically (there may be duplicates);
4644 to 1 if the values increase monotonically, but not always by 1;
4645 otherwise sets it to 0. */
4647 HOST_WIDE_INT
4648 all_cases_count (type, spareness)
4649 tree type;
4650 int *spareness;
4652 HOST_WIDE_INT count;
4653 *spareness = 0;
4655 switch (TREE_CODE (type))
4657 tree t;
4658 case BOOLEAN_TYPE:
4659 count = 2;
4660 break;
4661 case CHAR_TYPE:
4662 count = 1 << BITS_PER_UNIT;
4663 break;
4664 default:
4665 case INTEGER_TYPE:
4666 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4667 || TYPE_MAX_VALUE (type) == NULL
4668 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4669 return -1;
4670 else
4672 /* count
4673 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4674 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4675 but with overflow checking. */
4676 tree mint = TYPE_MIN_VALUE (type);
4677 tree maxt = TYPE_MAX_VALUE (type);
4678 HOST_WIDE_INT lo, hi;
4679 neg_double(TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4680 &lo, &hi);
4681 add_double(TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4682 lo, hi, &lo, &hi);
4683 add_double (lo, hi, 1, 0, &lo, &hi);
4684 if (hi != 0 || lo < 0)
4685 return -2;
4686 count = lo;
4688 break;
4689 case ENUMERAL_TYPE:
4690 count = 0;
4691 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4693 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4694 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4695 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4696 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4697 *spareness = 1;
4698 count++;
4700 if (*spareness == 1)
4702 tree prev = TREE_VALUE (TYPE_VALUES (type));
4703 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4705 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4707 *spareness = 2;
4708 break;
4710 prev = TREE_VALUE (t);
4715 return count;
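/* Illustrative examples (not from the original sources):

       enum a { X, Y, Z };        returns 3, *SPARENESS == 0
       enum b { P = 0, Q = 4 };   returns 2, *SPARENESS == 1
       enum c { M = 1, N = 0 };   returns 2, *SPARENESS == 2

   For a 32-bit `int' index the count 2**32 needs more than 32 bits,
   so on a host with a 32-bit HOST_WIDE_INT the function returns -2. */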
4719 #define BITARRAY_TEST(ARRAY, INDEX) \
4720 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4721 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4722 #define BITARRAY_SET(ARRAY, INDEX) \
4723 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4724 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
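/* Worked example (assuming HOST_BITS_PER_CHAR is 8): for INDEX 11,
   BITARRAY_TEST inspects bit 11 % 8 == 3 of byte 11 / 8 == 1, so the
   bitstring packs one case-seen flag per possible selector value.  */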
4726 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4727 with the case values we have seen, assuming the case expression
4728 has the given TYPE.
4729 SPARSENESS is as determined by all_cases_count.
4731 The time needed is proportional to COUNT, unless
4732 SPARSENESS is 2, in which case quadratic time is needed. */
4734 void
4735 mark_seen_cases (type, cases_seen, count, sparseness)
4736 tree type;
4737 unsigned char *cases_seen;
4738 long count;
4739 int sparseness;
4741 tree next_node_to_try = NULL_TREE;
4742 long next_node_offset = 0;
4744 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4745 tree val = make_node (INTEGER_CST);
4746 TREE_TYPE (val) = type;
4747 if (! root)
4748 ; /* Do nothing */
4749 else if (sparseness == 2)
4751 tree t;
4752 HOST_WIDE_INT xlo;
4754 /* This less efficient loop is only needed to handle
4755 duplicate case values (multiple enum constants
4756 with the same value). */
4757 TREE_TYPE (val) = TREE_TYPE (root->low);
4758 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4759 t = TREE_CHAIN (t), xlo++)
4761 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4762 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4763 n = root;
4766 /* Keep going past elements distinctly greater than VAL. */
4767 if (tree_int_cst_lt (val, n->low))
4768 n = n->left;
4770 /* or distinctly less than VAL. */
4771 else if (tree_int_cst_lt (n->high, val))
4772 n = n->right;
4774 else
4776 /* We have found a matching range. */
4777 BITARRAY_SET (cases_seen, xlo);
4778 break;
4781 while (n);
4784 else
4786 if (root->left)
4787 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4788 for (n = root; n; n = n->right)
4790 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4791 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4792 while ( ! tree_int_cst_lt (n->high, val))
4794 /* Calculate (into xlo) the "offset" of the integer (val).
4795 The element with lowest value has offset 0, the next smallest
4796 element has offset 1, etc. */
4798 HOST_WIDE_INT xlo, xhi;
4799 tree t;
4800 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4802 /* The TYPE_VALUES will be in increasing order, so
4803 start searching where we last ended. */
4804 t = next_node_to_try;
4805 xlo = next_node_offset;
4806 xhi = 0;
4807 for (;;)
4809 if (t == NULL_TREE)
4811 t = TYPE_VALUES (type);
4812 xlo = 0;
4814 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4816 next_node_to_try = TREE_CHAIN (t);
4817 next_node_offset = xlo + 1;
4818 break;
4820 xlo++;
4821 t = TREE_CHAIN (t);
4822 if (t == next_node_to_try)
4824 xlo = -1;
4825 break;
4829 else
4831 t = TYPE_MIN_VALUE (type);
4832 if (t)
4833 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4834 &xlo, &xhi);
4835 else
4836 xlo = xhi = 0;
4837 add_double (xlo, xhi,
4838 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4839 &xlo, &xhi);
4842 if (xhi == 0 && xlo >= 0 && xlo < count)
4843 BITARRAY_SET (cases_seen, xlo);
4844 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4845 1, 0,
4846 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4852 /* Called when the index of a switch statement is an enumerated type
4853 and there is no default label.
4855 Checks that all enumeration literals are covered by the case
4856 expressions of a switch. Also, warn if there are any extra
4857 switch cases that are *not* elements of the enumerated type.
4859 If all enumeration literals were covered by the case expressions,
4860 turn one of the expressions into the default expression since it should
4861 not be possible to fall through such a switch. */
4863 void
4864 check_for_full_enumeration_handling (type)
4865 tree type;
4867 register struct case_node *n;
4868 register tree chain;
4869 #if 0 /* variable used by 'if 0'ed code below. */
4870 register struct case_node **l;
4871 int all_values = 1;
4872 #endif
4874 /* True iff the selector type is a numbered set mode. */
4875 int sparseness = 0;
4877 /* The number of possible selector values. */
4878 HOST_WIDE_INT size;
4880 /* For each possible selector value, a one iff it has been matched
4881 by a case value alternative. */
4882 unsigned char *cases_seen;
4884 /* The allocated size of cases_seen, in chars. */
4885 long bytes_needed;
4887 if (! warn_switch)
4888 return;
4890 size = all_cases_count (type, &sparseness);
4891 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
4893 if (size > 0 && size < 600000
4894 /* We deliberately use malloc here - not xmalloc. */
4895 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
4897 long i;
4898 tree v = TYPE_VALUES (type);
4899 bzero (cases_seen, bytes_needed);
4901 /* The time complexity of this code is normally O(N), where
4902 N is the number of members in the enumerated type.
4903 However, if type is an ENUMERAL_TYPE whose values do not
4904 increase monotonically, O(N*log(N)) time may be needed. */
4906 mark_seen_cases (type, cases_seen, size, sparseness);
4908 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4910 if (BITARRAY_TEST(cases_seen, i) == 0)
4911 warning ("enumeration value `%s' not handled in switch",
4912 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
4915 free (cases_seen);
4918 /* Now we go the other way around; we warn if there are case
4919 expressions that don't correspond to enumerators. This can
4920 occur since C and C++ don't enforce type-checking of
4921 assignments to enumeration variables. */
4923 if (case_stack->data.case_stmt.case_list
4924 && case_stack->data.case_stmt.case_list->left)
4925 case_stack->data.case_stmt.case_list
4926 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
4927 if (warn_switch)
4928 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4930 for (chain = TYPE_VALUES (type);
4931 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4932 chain = TREE_CHAIN (chain))
4935 if (!chain)
4937 if (TYPE_NAME (type) == 0)
4938 warning ("case value `%ld' not in enumerated type",
4939 (long) TREE_INT_CST_LOW (n->low));
4940 else
4941 warning ("case value `%ld' not in enumerated type `%s'",
4942 (long) TREE_INT_CST_LOW (n->low),
4943 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4944 == IDENTIFIER_NODE)
4945 ? TYPE_NAME (type)
4946 : DECL_NAME (TYPE_NAME (type))));
4948 if (!tree_int_cst_equal (n->low, n->high))
4950 for (chain = TYPE_VALUES (type);
4951 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4952 chain = TREE_CHAIN (chain))
4955 if (!chain)
4957 if (TYPE_NAME (type) == 0)
4958 warning ("case value `%ld' not in enumerated type",
4959 (long) TREE_INT_CST_LOW (n->high));
4960 else
4961 warning ("case value `%ld' not in enumerated type `%s'",
4962 (long) TREE_INT_CST_LOW (n->high),
4963 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4964 == IDENTIFIER_NODE)
4965 ? TYPE_NAME (type)
4966 : DECL_NAME (TYPE_NAME (type))));
4971 #if 0
4972 /* ??? This optimization is disabled because it causes valid programs to
4973 fail. ANSI C does not guarantee that an expression with enum type
4974 will have a value that is the same as one of the enumeration literals. */
4976 /* If all values were found as case labels, make one of them the default
4977 label. Thus, this switch will never fall through. We arbitrarily pick
4978 the last one to make the default since this is likely the most
4979 efficient choice. */
4981 if (all_values)
4983 for (l = &case_stack->data.case_stmt.case_list;
4984 (*l)->right != 0;
4985 l = &(*l)->right)
4988 case_stack->data.case_stmt.default_label = (*l)->code_label;
4989 *l = 0;
4991 #endif /* 0 */
4995 /* Terminate a case (Pascal) or switch (C) statement
4996 in which ORIG_INDEX is the expression to be tested.
4997 Generate the code to test it and jump to the right place. */
4999 void
5000 expand_end_case (orig_index)
5001 tree orig_index;
5003 tree minval = NULL_TREE, maxval = NULL_TREE, range, orig_minval;
5004 rtx default_label = 0;
5005 register struct case_node *n;
5006 unsigned int count;
5007 rtx index;
5008 rtx table_label;
5009 int ncases;
5010 rtx *labelvec;
5011 register int i;
5012 rtx before_case;
5013 register struct nesting *thiscase = case_stack;
5014 tree index_expr, index_type;
5015 int unsignedp;
5017 table_label = gen_label_rtx ();
5018 index_expr = thiscase->data.case_stmt.index_expr;
5019 index_type = TREE_TYPE (index_expr);
5020 unsignedp = TREE_UNSIGNED (index_type);
5022 do_pending_stack_adjust ();
5024 /* This might get a spurious warning in the presence of a syntax error;
5025 it could be fixed by moving the call to check_seenlabel after the
5026 check for error_mark_node, and copying the code of check_seenlabel that
5027 deals with case_stack->data.case_stmt.line_number_status /
5028 restore_line_number_status in front of the call to end_cleanup_deferral;
5029 however, this might miss some useful warnings in the presence of
5030 non-syntax errors. */
5031 check_seenlabel ();
5033 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5034 if (index_type != error_mark_node)
5036 /* If switch expression was an enumerated type, check that all
5037 enumeration literals are covered by the cases.
5038 No sense trying this if there's a default case, however. */
5040 if (!thiscase->data.case_stmt.default_label
5041 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5042 && TREE_CODE (index_expr) != INTEGER_CST)
5043 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5045 /* If we don't have a default-label, create one here,
5046 after the body of the switch. */
5047 if (thiscase->data.case_stmt.default_label == 0)
5049 thiscase->data.case_stmt.default_label
5050 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5051 expand_label (thiscase->data.case_stmt.default_label);
5053 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5055 before_case = get_last_insn ();
5057 if (thiscase->data.case_stmt.case_list
5058 && thiscase->data.case_stmt.case_list->left)
5059 thiscase->data.case_stmt.case_list
5060 = case_tree2list(thiscase->data.case_stmt.case_list, 0);
5062 /* Simplify the case-list before we count it. */
5063 group_case_nodes (thiscase->data.case_stmt.case_list);
5065 /* Get upper and lower bounds of case values.
5066 Also convert all the case values to the index expr's data type. */
5068 count = 0;
5069 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5071 /* Check low and high label values are integers. */
5072 if (TREE_CODE (n->low) != INTEGER_CST)
5073 abort ();
5074 if (TREE_CODE (n->high) != INTEGER_CST)
5075 abort ();
5077 n->low = convert (index_type, n->low);
5078 n->high = convert (index_type, n->high);
5080 /* Count the elements and track the largest and smallest
5081 of them (treating them as signed even if they are not). */
5082 if (count++ == 0)
5084 minval = n->low;
5085 maxval = n->high;
5087 else
5089 if (INT_CST_LT (n->low, minval))
5090 minval = n->low;
5091 if (INT_CST_LT (maxval, n->high))
5092 maxval = n->high;
5094 /* A range counts double, since it requires two compares. */
5095 if (! tree_int_cst_equal (n->low, n->high))
5096 count++;
5099 orig_minval = minval;
5101 /* Compute span of values. */
5102 if (count != 0)
5103 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5105 end_cleanup_deferral ();
5107 if (count == 0)
5109 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5110 emit_queue ();
5111 emit_jump (default_label);
5114 /* If the range of values is much bigger than the number of values,
5115 make a sequence of conditional branches instead of a dispatch.
5116 If the switch-index is a constant, do it this way
5117 because we can optimize it. */
5119 #ifndef CASE_VALUES_THRESHOLD
5120 #ifdef HAVE_casesi
5121 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5122 #else
5123 /* If the machine does not have a case insn that compares the
5124 bounds, this means extra overhead for dispatch tables
5125 which raises the threshold for using them. */
5126 #define CASE_VALUES_THRESHOLD 5
5127 #endif /* HAVE_casesi */
5128 #endif /* CASE_VALUES_THRESHOLD */
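/* Illustrative numbers (not from the original sources): a switch with
   count == 8 labels spanning range == 1000 takes the branch-tree arm
   below, since 1000 > 10 * 8; the same 8 labels packed into a range
   of 9 would instead fall through to the dispatch-table code.  */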
5130 else if (TREE_INT_CST_HIGH (range) != 0
5131 || count < (unsigned int) CASE_VALUES_THRESHOLD
5132 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
5133 > 10 * count)
5134 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5135 || flag_pic
5136 #endif
5137 || TREE_CODE (index_expr) == INTEGER_CST
5138 /* These will reduce to a constant. */
5139 || (TREE_CODE (index_expr) == CALL_EXPR
5140 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5141 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5142 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5143 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5144 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5146 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5148 /* If the index is a short or char for which we do not have
5149 an insn to handle comparisons directly, convert it to
5150 a full integer now, rather than letting each comparison
5151 generate the conversion. */
5153 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5154 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
5155 == CODE_FOR_nothing))
5157 enum machine_mode wider_mode;
5158 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5159 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5160 if (cmp_optab->handlers[(int) wider_mode].insn_code
5161 != CODE_FOR_nothing)
5163 index = convert_to_mode (wider_mode, index, unsignedp);
5164 break;
5168 emit_queue ();
5169 do_pending_stack_adjust ();
5171 index = protect_from_queue (index, 0);
5172 if (GET_CODE (index) == MEM)
5173 index = copy_to_reg (index);
5174 if (GET_CODE (index) == CONST_INT
5175 || TREE_CODE (index_expr) == INTEGER_CST)
5177 /* Make a tree node with the proper constant value
5178 if we don't already have one. */
5179 if (TREE_CODE (index_expr) != INTEGER_CST)
5181 index_expr
5182 = build_int_2 (INTVAL (index),
5183 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5184 index_expr = convert (index_type, index_expr);
5187 /* For constant index expressions we need only
5188 issue an unconditional branch to the appropriate
5189 target code. The job of removing any unreachable
5190 code is left to the optimization phase if the
5191 "-O" option is specified. */
5192 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5193 if (! tree_int_cst_lt (index_expr, n->low)
5194 && ! tree_int_cst_lt (n->high, index_expr))
5195 break;
5197 if (n)
5198 emit_jump (label_rtx (n->code_label));
5199 else
5200 emit_jump (default_label);
5202 else
5204 /* If the index expression is not constant we generate
5205 a binary decision tree to select the appropriate
5206 target code. This is done as follows:
5208 The list of cases is rearranged into a binary tree,
5209 nearly optimal assuming equal probability for each case.
5211 The tree is transformed into RTL, eliminating
5212 redundant test conditions at the same time.
5214 If program flow could reach the end of the
5215 decision tree, an unconditional jump to the
5216 default code is emitted. */
5218 use_cost_table
5219 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5220 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5221 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5222 NULL_PTR);
5223 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5224 default_label, index_type);
5225 emit_jump_if_reachable (default_label);
5228 else
5230 int win = 0;
5231 #ifdef HAVE_casesi
5232 if (HAVE_casesi)
5234 enum machine_mode index_mode = SImode;
5235 int index_bits = GET_MODE_BITSIZE (index_mode);
5236 rtx op1, op2;
5237 enum machine_mode op_mode;
5239 /* Convert the index to SImode. */
5240 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5241 > GET_MODE_BITSIZE (index_mode))
5243 enum machine_mode omode = TYPE_MODE (index_type);
5244 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5246 /* We must handle the endpoints in the original mode. */
5247 index_expr = build (MINUS_EXPR, index_type,
5248 index_expr, minval);
5249 minval = integer_zero_node;
5250 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5251 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
5252 omode, 1, 0, default_label);
5253 /* Now we can safely truncate. */
5254 index = convert_to_mode (index_mode, index, 0);
5256 else
5258 if (TYPE_MODE (index_type) != index_mode)
5260 index_expr = convert (type_for_size (index_bits, 0),
5261 index_expr);
5262 index_type = TREE_TYPE (index_expr);
5265 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5267 emit_queue ();
5268 index = protect_from_queue (index, 0);
5269 do_pending_stack_adjust ();
5271 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
5272 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
5273 (index, op_mode))
5274 index = copy_to_mode_reg (op_mode, index);
5276 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5278 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
5279 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
5280 (op1, op_mode))
5281 op1 = copy_to_mode_reg (op_mode, op1);
5283 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5285 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
5286 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
5287 (op2, op_mode))
5288 op2 = copy_to_mode_reg (op_mode, op2);
5290 emit_jump_insn (gen_casesi (index, op1, op2,
5291 table_label, default_label));
5292 win = 1;
5294 #endif
5295 #ifdef HAVE_tablejump
5296 if (! win && HAVE_tablejump)
5298 index_expr = convert (thiscase->data.case_stmt.nominal_type,
5299 fold (build (MINUS_EXPR, index_type,
5300 index_expr, minval)));
5301 index_type = TREE_TYPE (index_expr);
5302 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5303 emit_queue ();
5304 index = protect_from_queue (index, 0);
5305 do_pending_stack_adjust ();
5307 do_tablejump (index, TYPE_MODE (index_type),
5308 expand_expr (range, NULL_RTX, VOIDmode, 0),
5309 table_label, default_label);
5310 win = 1;
5312 #endif
5313 if (! win)
5314 abort ();
5316 /* Get table of labels to jump to, in order of case index. */
5318 ncases = TREE_INT_CST_LOW (range) + 1;
5319 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5320 bzero ((char *) labelvec, ncases * sizeof (rtx));
5322 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5324 register HOST_WIDE_INT i
5325 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5327 while (1)
5329 labelvec[i]
5330 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5331 if (i + TREE_INT_CST_LOW (orig_minval)
5332 == TREE_INT_CST_LOW (n->high))
5333 break;
5334 i++;
5338 /* Fill in the gaps with the default. */
5339 for (i = 0; i < ncases; i++)
5340 if (labelvec[i] == 0)
5341 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
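/* Illustrative layout (not from the original sources): for case
   values 2, 4 and 5 with minval 2, range is 3, ncases is 4, and
   labelvec becomes

       { L2, default, L4, L5 }

   i.e. one entry per value in [minval, maxval], gaps defaulted.  */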
5343 /* Output the table */
5344 emit_label (table_label);
5346 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5347 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5348 gen_rtx_LABEL_REF (Pmode, table_label),
5349 gen_rtvec_v (ncases, labelvec),
5350 const0_rtx, const0_rtx, 0));
5351 else
5352 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5353 gen_rtvec_v (ncases, labelvec)));
5355 /* If the case insn drops through the table,
5356 after the table we must jump to the default-label.
5357 Otherwise record no drop-through after the table. */
5358 #ifdef CASE_DROPS_THROUGH
5359 emit_jump (default_label);
5360 #else
5361 emit_barrier ();
5362 #endif
5365 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5366 reorder_insns (before_case, get_last_insn (),
5367 thiscase->data.case_stmt.start);
5369 else
5370 end_cleanup_deferral ();
5372 if (thiscase->exit_label)
5373 emit_label (thiscase->exit_label);
5375 POPSTACK (case_stack);
5377 free_temp_slots ();
5380 /* Convert the tree NODE into a list linked by the right field, with the left
5381 field zeroed. RIGHT is used for recursion; it is a list to be placed
5382 rightmost in the resulting list. */
5384 static struct case_node *
5385 case_tree2list (node, right)
5386 struct case_node *node, *right;
5388 struct case_node *left;
5390 if (node->right)
5391 right = case_tree2list (node->right, right);
5393 node->right = right;
5394 if ((left = node->left))
5396 node->left = 0;
5397 return case_tree2list (left, node);
5400 return node;
5403 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5405 static void
5406 do_jump_if_equal (op1, op2, label, unsignedp)
5407 rtx op1, op2, label;
5408 int unsignedp;
5410 if (GET_CODE (op1) == CONST_INT
5411 && GET_CODE (op2) == CONST_INT)
5413 if (INTVAL (op1) == INTVAL (op2))
5414 emit_jump (label);
5416 else
5418 enum machine_mode mode = GET_MODE (op1);
5419 if (mode == VOIDmode)
5420 mode = GET_MODE (op2);
5421 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
5422 0, label);
5426 /* Not all case values are encountered equally. This function
5427 uses a heuristic to weight case labels, in cases where that
5428 looks like a reasonable thing to do.
5430 Right now, all we try to guess is text, and we establish the
5431 following weights:
5433 chars above space: 16
5434 digits: 16
5435 default: 12
5436 space, punct: 8
5437 tab: 4
5438 newline: 2
5439 other "\" chars: 1
5440 remaining chars: 0
5442 If we find any cases in the switch that are not either -1 or in the range
5443 of valid ASCII characters, or are control characters other than those
5444 commonly used with "\", don't treat this switch as scanning text.
5446 Return 1 if these nodes are suitable for cost estimation, otherwise
5447 return 0. */
5449 static int
5450 estimate_case_costs (node)
5451 case_node_ptr node;
5453 tree min_ascii = build_int_2 (-1, -1);
5454 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5455 case_node_ptr n;
5456 int i;
5458 /* If we haven't already made the cost table, make it now. Note that the
5459 lower bound of the table is -1, not zero. */
5461 if (cost_table == NULL)
5463 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5464 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5466 for (i = 0; i < 128; i++)
5468 if (ISALNUM (i))
5469 cost_table[i] = 16;
5470 else if (ISPUNCT (i))
5471 cost_table[i] = 8;
5472 else if (ISCNTRL (i))
5473 cost_table[i] = -1;
5476 cost_table[' '] = 8;
5477 cost_table['\t'] = 4;
5478 cost_table['\0'] = 4;
5479 cost_table['\n'] = 2;
5480 cost_table['\f'] = 1;
5481 cost_table['\v'] = 1;
5482 cost_table['\b'] = 1;
5485 /* See if all the case expressions look like text. It is text if the
5486 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5487 as signed arithmetic since we don't want to ever access cost_table with a
5488 value less than -1. Also check that none of the constants in a range
5489 are strange control characters. */
5491 for (n = node; n; n = n->right)
5493 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5494 return 0;
5496 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5497 if (cost_table[i] < 0)
5498 return 0;
5501 /* All interesting values are within the range of interesting
5502 ASCII characters. */
5503 return 1;
5506 /* Scan an ordered list of case nodes
5507 combining those with consecutive values or ranges.
5509 E.g., three separate entries 1: 2: 3: become one entry 1..3: */
5511 static void
5512 group_case_nodes (head)
5513 case_node_ptr head;
5515 case_node_ptr node = head;
5517 while (node)
5519 rtx lb = next_real_insn (label_rtx (node->code_label));
5520 rtx lb2;
5521 case_node_ptr np = node;
5523 /* Try to group the successors of NODE with NODE. */
5524 while (((np = np->right) != 0)
5525 /* Do they jump to the same place? */
5526 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5527 || (lb != 0 && lb2 != 0
5528 && simplejump_p (lb)
5529 && simplejump_p (lb2)
5530 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5531 SET_SRC (PATTERN (lb2)))))
5532 /* Are their ranges consecutive? */
5533 && tree_int_cst_equal (np->low,
5534 fold (build (PLUS_EXPR,
5535 TREE_TYPE (node->high),
5536 node->high,
5537 integer_one_node)))
5538 /* An overflow is not consecutive. */
5539 && tree_int_cst_lt (node->high,
5540 fold (build (PLUS_EXPR,
5541 TREE_TYPE (node->high),
5542 node->high,
5543 integer_one_node))))
5545 node->high = np->high;
5547 /* NP is the first node after NODE which can't be grouped with it.
5548 Delete the nodes in between, and move on to that node. */
5549 node->right = np;
5550 node = np;
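
/* Grouping also applies across distinct labels whose generated code is
   an equivalent simple jump.  For instance (an illustrative sketch):

        case 1: case 2: goto common;
        case 3: goto common;

   Here the first real insn after each case label is the same jump (or
   an rtx_equal_p copy of it), and the values are consecutive, so the
   nodes are merged into the single node 1..3.  */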

/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on this branch.  Also count the
         ranges.  */

      while (np)
        {
          if (!tree_int_cst_equal (np->low, np->high))
            {
              ranges++;
              if (use_cost_table)
                cost += cost_table[TREE_INT_CST_LOW (np->high)];
            }

          if (use_cost_table)
            cost += cost_table[TREE_INT_CST_LOW (np->low)];

          i++;
          np = np->right;
        }

      if (i > 2)
        {
          /* Split this list if it is long enough for that to help.  */
          npp = head;
          left = *npp;
          if (use_cost_table)
            {
              /* Find the place in the list that bisects the list's total
                 cost; here I gets half the total cost.  */
              int n_moved = 0;
              i = (cost + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
                  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                  n_moved += 1;
                }
              if (n_moved == 0)
                {
                  /* Leave this branch lopsided, but optimize left-hand
                     side and fill in `parent' fields for right-hand side.  */
                  np = *head;
                  np->parent = parent;
                  balance_case_nodes (&np->left, np);
                  for (; np->right; np = np->right)
                    np->right->parent = np;
                  return;
                }
            }
          /* If there are just three nodes, split at the middle one.  */
          else if (i == 3)
            npp = &(*npp)->right;
          else
            {
              /* Find the place in the list that bisects the list's total
                 cost, where ranges count as 2.
                 Here I gets half the total cost.  */
              i = (i + ranges + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i--;
                  i--;
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                }
            }
          *head = np = *npp;
          *npp = 0;
          np->parent = parent;
          np->left = left;

          /* Optimize each of the two split parts.  */
          balance_case_nodes (&np->left, np);
          balance_case_nodes (&np->right, np);
        }
      else
        {
          /* Else leave this branch as one level,
             but fill in `parent' fields.  */
          np = *head;
          np->parent = parent;
          for (; np->right; np = np->right)
            np->right->parent = np;
        }
    }
}
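
/* Example of the transformation (illustrative only): the ordered list

        1 -> 2 -> 3 -> 4 -> 5

   of single-valued nodes, with no cost table, has i == 5, so the split
   point computed above is the third node.  Node 3 becomes the pivot,
   nodes 1..2 its left branch and nodes 4..5 its right branch, and each
   branch is then balanced recursively, giving a tree of depth 2 in
   place of a chain of five sequential compares.  */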

/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed, so it is
   known that if a parent node checks the value one below the
   current node's low bound, the current node is already bounded
   at its lower span.  Thus the test would be redundant.  */

static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
                               node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}

/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed, so it is
   known that if a parent node checks the value one above the
   current node's high bound, the current node is already bounded
   at its upper span.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If there is no upper bound, obviously no test is needed.  */

  if (TYPE_MAX_VALUE (index_type) == NULL)
    return 1;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
                               node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}
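
/* For instance (illustrative): if NODE covers 4..5 and some ancestor
   PNODE has low == 6, control can only reach NODE when the index was
   already found to be less than 6, i.e. <= 5, so the explicit
   `index > 5' test against NODE's high bound is redundant and is
   omitted.  node_has_low_bound reasons symmetrically about
   value - 1.  */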

/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
          && node_has_high_bound (node, index_type));
}

/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}

/* Emit step-by-step code to select a case for the value of INDEX.
   The decision tree thus generated follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinate's associated
   code directly for out-of-bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */

static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_fn ();
  enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
         this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                        label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
        {
          /* This node has children on both sides.
             Dispatch to one side or the other
             by comparing the index value with this node's value.
             If one subtree is bounded, check that one first,
             so we can avoid real branches in the tree.  */

          if (node_is_bounded (node->right, index_type))
            {
              emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
                                                           VOIDmode, 0),
                                       GT, NULL_RTX, mode, unsignedp, 0,
                                       label_rtx (node->right->code_label));
              emit_case_nodes (index, node->left, default_label, index_type);
            }

          else if (node_is_bounded (node->left, index_type))
            {
              emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
                                                           VOIDmode, 0),
                                       LT, NULL_RTX, mode, unsignedp, 0,
                                       label_rtx (node->left->code_label));
              emit_case_nodes (index, node->right, default_label, index_type);
            }

          else
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              tree test_label
                = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

              /* See if the value is on the right.  */
              emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
                                                           VOIDmode, 0),
                                       GT, NULL_RTX, mode, unsignedp, 0,
                                       label_rtx (test_label));

              /* Value must be on the left.
                 Handle the left-hand subtree.  */
              emit_case_nodes (index, node->left, default_label, index_type);
              /* If left-hand subtree does nothing,
                 go to default.  */
              emit_jump_if_reachable (default_label);

              /* Code branches here for the right-hand subtree.  */
              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }
      else if (node->right != 0 && node->left == 0)
        {
          /* Here we have a right child but no left so we issue a conditional
             branch to default and process the right child.

             Omit the conditional branch to default if it would avoid only one
             right child; it costs too much space to save so little time.  */

          if (node->right->right || node->right->left
              || !tree_int_cst_equal (node->right->low, node->right->high))
            {
              if (!node_has_low_bound (node, index_type))
                {
                  emit_cmp_and_jump_insns (index, expand_expr (node->high,
                                                               NULL_RTX,
                                                               VOIDmode, 0),
                                           LT, NULL_RTX, mode, unsignedp, 0,
                                           default_label);
                }

              emit_case_nodes (index, node->right, default_label, index_type);
            }
          else
            /* We cannot process node->right normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->right explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->right->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->right->code_label), unsignedp);
        }
      else if (node->right == 0 && node->left != 0)
        {
          /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
         of the condition here, but they didn't work
         and I don't understand what the idea was.  -- rms.  */
          /* If our "most probable entry" is less probable
             than the default label, emit a jump to
             the default label using condition codes
             already lying around.  With no right branch,
             a branch-greater-than will get us to the default
             label correctly.  */
          if (use_cost_table
              && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
            ;
#endif /* 0 */
          if (node->left->left || node->left->right
              || !tree_int_cst_equal (node->left->low, node->left->high))
            {
              if (!node_has_high_bound (node, index_type))
                {
                  emit_cmp_and_jump_insns (index, expand_expr (node->high,
                                                               NULL_RTX,
                                                               VOIDmode, 0),
                                           GT, NULL_RTX, mode, unsignedp, 0,
                                           default_label);
                }

              emit_case_nodes (index, node->left, default_label, index_type);
            }
          else
            /* We cannot process node->left normally
               since we haven't ruled out the numbers greater than
               this node's value.  So handle node->left explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->left->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->left->code_label), unsignedp);
        }
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
         value, except that we do not start by testing whether this node
         is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
        {
          /* Node has subtrees on both sides.
             If the right-hand subtree is bounded,
             test for it first, since we can go straight there.
             Otherwise, we need to make a branch in the control structure,
             then handle the two subtrees.  */
          tree test_label = 0;

          if (node_is_bounded (node->right, index_type))
            /* Right hand node is fully bounded so we can eliminate any
               testing and branch directly to the target code.  */
            emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
                                                         VOIDmode, 0),
                                     GT, NULL_RTX, mode, unsignedp, 0,
                                     label_rtx (node->right->code_label));
          else
            {
              /* Right hand node requires testing.
                 Branch to a label where we will handle it later.  */

              test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
              emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
                                                           VOIDmode, 0),
                                       GT, NULL_RTX, mode, unsignedp, 0,
                                       label_rtx (test_label));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
                                                       VOIDmode, 0),
                                   GE, NULL_RTX, mode, unsignedp, 0,
                                   label_rtx (node->code_label));

          /* Handle the left-hand subtree.  */
          emit_case_nodes (index, node->left, default_label, index_type);

          /* If right node had to be handled later, do that now.  */

          if (test_label)
            {
              /* If the left-hand subtree fell through,
                 don't let it fall into the right-hand subtree.  */
              emit_jump_if_reachable (default_label);

              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }
      else if (node->right != 0 && node->left == 0)
        {
          /* Deal with values to the left of this node,
             if they are possible.  */
          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
                                                           VOIDmode, 0),
                                       LT, NULL_RTX, mode, unsignedp, 0,
                                       default_label);
            }

          /* Value belongs to this node or to the right-hand subtree.  */

          emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
                                                       VOIDmode, 0),
                                   LE, NULL_RTX, mode, unsignedp, 0,
                                   label_rtx (node->code_label));

          emit_case_nodes (index, node->right, default_label, index_type);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Deal with values to the right of this node,
             if they are possible.  */
          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
                                                           VOIDmode, 0),
                                       GT, NULL_RTX, mode, unsignedp, 0,
                                       default_label);
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
                                                       VOIDmode, 0),
                                   GE, NULL_RTX, mode, unsignedp, 0,
                                   label_rtx (node->code_label));

          emit_case_nodes (index, node->left, default_label, index_type);
        }

      else
        {
          /* Node has no children so we check low and high bounds to remove
             redundant tests.  Only one of the bounds can exist,
             since otherwise this node is bounded--a case tested already.  */

          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
                                                           VOIDmode, 0),
                                       GT, NULL_RTX, mode, unsignedp, 0,
                                       default_label);
            }

          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
                                                           VOIDmode, 0),
                                       LT, NULL_RTX, mode, unsignedp, 0,
                                       default_label);
            }

          emit_jump (label_rtx (node->code_label));
        }
    }
}
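
/* Example of the emitted decision tree (an illustrative sketch, not a
   literal dump): for a tree whose root is the single value 4 with
   subtrees {1..2} and {6..8}, neither subtree bounded by its parents,
   the generated code is roughly

        if (index == 4) goto L4;        ; root, single value
        if (index > 4) goto Ltest;      ; distinguish the two sides
        if (index > 2) goto Ldefault;   ; left subtree, range 1..2
        if (index < 1) goto Ldefault;
        goto L1_2;
     Ltest:
        if (index > 8) goto Ldefault;   ; right subtree, range 6..8
        if (index < 6) goto Ldefault;
        goto L6_8;

   with any compare that an ancestor's test already implies pruned away
   by the node_has_*_bound checks.  */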

/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.

   Allocated by the call to identify_blocks, then released after the call
   to reorder_blocks in the function unroll_block_trees.  */

static tree *block_vector;

void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());

  /* Release any memory allocated by identify_blocks.  */
  if (block_vector)
    free (block_vector);
}
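
/* Expected call order (a sketch inferred from the comments above, not
   stated in the original code): the unroller calls
   find_loop_tree_blocks () to capture the BLOCK structure before it
   duplicates insns, and unroll_block_trees () afterwards, so that
   reorder_blocks can rebuild the tree to match the copied block notes
   and block_vector is then freed.  */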