/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
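
/* As an illustration, a front end expanding `if (cond) then_stmt;'
   would make roughly this sequence of calls (a sketch only; the exact
   arguments vary by front end, COND_EXPR is a hypothetical variable
   holding the condition tree, and EXITFLAG is shown as 0):  */
#if 0
  expand_start_cond (cond_expr, 0);   /* emit the test of the condition */
  /* ... parse and expand the then-clause here ... */
  expand_end_cond ();                 /* emit the end-of-if label */
#endif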
#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
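
/* Illustration: the GNU C range label `case 1 ... 4:' becomes a single
   case_node with LOW == 1 and HIGH == 4 (as tree constants).  A sketch
   of how a front end might register it, assuming a LABEL_DECL in
   LABEL_DECL_NODE and an out-parameter DUPLICATE (names hypothetical):  */
#if 0
  tree duplicate;
  add_case_node (build_int_2 (1, 0), build_int_2 (4, 0),
                 label_decl_node, &duplicate);
#endif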
struct case_node
{
  struct case_node *left;    /* Left son in binary tree */
  struct case_node *right;   /* Right son in binary tree; also node chain */
  struct case_node *parent;  /* Parent of node in binary tree */
  tree low;                  /* Lowest index value for this label */
  tree high;                 /* Highest index value for this label */
  tree code_label;           /* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } cond;
      /* For loops.  */
      struct
        {
          /* Label at the top of the loop; place to loop back to.  */
          rtx start_label;
          /* Label at the end of the whole construct.  */
          rtx end_label;
          /* Label before a jump that branches to the end of the whole
             construct.  This is where destructors go if any.  */
          rtx alt_end_label;
          /* Label for `continue' statement to jump to;
             this is in front of the stepper of the loop.  */
          rtx continue_label;
        } loop;
      /* For variable binding contours.  */
      struct
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0,
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Number of function calls seen, as of start of this block.  */
          int function_call_count;
          /* Nonzero if this is associated with an EH region.  */
          int exception_region;
          /* The saved target_temp_slot_level from our outer block.
             We may reset target_temp_slot_level to be the level of
             this block, if that is done, target_temp_slot_level
             reverts to the saved target_temp_slot_level at the very
             end of the block.  */
          int target_temp_slot_level;
          /* True if we are currently emitting insns in an area of
             output code that is controlled by a conditional
             expression.  This is used by the cleanup handling code to
             generate conditional cleanup actions.  */
          int conditional_code;
          /* A place to move the start of the exception region for any
             of the conditional cleanups, must be at the end or after
             the start of the last unconditional cleanup, and before any
             conditional branch points.  */
          rtx last_unconditional_cleanup;
          /* When in a conditional context, this is the specific
             cleanup list associated with last_unconditional_cleanup,
             where we place the conditionalized cleanups.  */
          tree *cleanup_ptr;
        } block;
      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */
      struct
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* A list of case labels; it is first built as an AVL tree.
             During expand_end_case, this is converted to a list, and may be
             rearranged into a nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Number of range exprs in case statement.  */
          int num_ranges;
          /* Name of this kind of statement, for warnings.  */
          const char *printname;
          /* Used to save no_line_numbers till we see the first case label.
             We set this to -1 when we see the first case label in this
             case statement.  */
          int line_number_status;
        } case_stmt;
    } data;
};
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)                                  \
do { struct nesting *target = STACK;                     \
     struct nesting *this;                               \
     do { this = nesting_stack;                          \
          if (loop_stack == this)                        \
            loop_stack = loop_stack->next;               \
          if (cond_stack == this)                        \
            cond_stack = cond_stack->next;               \
          if (block_stack == this)                       \
            block_stack = block_stack->next;             \
          if (stack_block_stack == this)                 \
            stack_block_stack = stack_block_stack->next; \
          if (case_stack == this)                        \
            case_stack = case_stack->next;               \
          nesting_depth = nesting_stack->depth - 1;      \
          nesting_stack = this->all;                     \
          obstack_free (&stmt_obstack, this); }          \
     while (this != target); } while (0)
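
/* Illustration: the expand_end_* functions unwind to their own nesting
   entry with POPSTACK.  A sketch (THISLOOP is the entry that was made
   when the loop construct was started):  */
#if 0
  struct nesting *thisloop = loop_stack;
  /* ... emit the loop's closing insns ... */
  POPSTACK (loop_stack);   /* frees nesting entries down through THISLOOP */
#endif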
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};


/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;
static int n_occurrences                PROTO((int, const char *));
static void expand_goto_internal        PROTO((tree, rtx, rtx));
static int expand_fixup                 PROTO((tree, rtx, rtx));
static rtx expand_nl_handler_label      PROTO((rtx, rtx));
static void expand_nl_goto_receiver     PROTO((void));
static void expand_nl_goto_receivers    PROTO((struct nesting *));
static void fixup_gotos                 PROTO((struct nesting *, rtx, tree,
                                               rtx, int));
static void expand_null_return_1        PROTO((rtx, int));
static void expand_value_return         PROTO((rtx));
static int tail_recursion_args          PROTO((tree, tree));
static void expand_cleanups             PROTO((tree, tree, int, int));
static void check_seenlabel             PROTO((void));
static void do_jump_if_equal            PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs          PROTO((case_node_ptr));
static void group_case_nodes            PROTO((case_node_ptr));
static void balance_case_nodes          PROTO((case_node_ptr *,
                                               case_node_ptr));
static int node_has_low_bound           PROTO((case_node_ptr, tree));
static int node_has_high_bound          PROTO((case_node_ptr, tree));
static int node_is_bounded              PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable      PROTO((rtx));
static void emit_case_nodes             PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node                PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list PROTO((case_node *, case_node *));
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
  init_eh ();
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;

  init_eh_for_function ();
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
  save_eh_status (p);
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
  restore_eh_status (p);
}
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
          || (GET_CODE (last_insn) == NOTE
              && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (current_function_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, 1,
                       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */
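
/* Illustration: the kind of GNU C source this machinery supports.
   The front end calls declare_nonlocal_label for `out' because the
   nested function `inner' does a nonlocal goto to it (sketch):

        void outer ()
        {
          void inner () { goto out; }
          inner ();
         out: ;
        }
*/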
void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
                       &nonlocal_goto_stack_level,
                       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx temp, handler_slot;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->nonlocal_goto_handler_slots;
      for (link = p->nonlocal_labels; TREE_VALUE (link) != label;
           link = TREE_CHAIN (link))
        handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
         case the virtual stack vars register gets instantiated differently
         in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
                                      copy_rtx (handler_slot),
                                      copy_rtx (p->nonlocal_goto_stack_level),
                                      label_ref));
      else
#endif
        {
          rtx addr;

          /* Restore frame pointer for containing function.
             This sets the actual hard register used for the frame pointer
             to the location of the function's incoming static chain info.
             The non-local goto handler will then adjust it to contain the
             proper value and reload the argument pointer, if needed.  */
          emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

          /* We have now loaded the frame pointer hardware register with
             the address that corresponds to the start of the virtual
             stack vars.  So replace virtual_stack_vars_rtx in all
             addresses we use with stack_pointer_rtx.  */

          /* Get addr of containing function's current nonlocal goto handler,
             which will do any cleanups and then jump to the label.  */
          addr = copy_rtx (handler_slot);
          temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
                                           hard_frame_pointer_rtx));

          /* Restore the stack pointer.  Note this uses fp just restored.  */
          addr = p->nonlocal_goto_stack_level;
          if (addr)
            addr = replace_rtx (copy_rtx (addr),
                                virtual_stack_vars_rtx,
                                hard_frame_pointer_rtx);

          emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

          /* USE of hard_frame_pointer_rtx added for consistency; not clear if
             really needed.  */
          emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
          emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
          emit_indirect_jump (temp);
        }
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a CODE_LABEL rtx.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing insn-uids.)
         Then restore the outermost stack level within that block,
         and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
        {
          if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
            break;
          if (block->data.block.stack_level != 0)
            stack_level = block->data.block.stack_level;
          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
            }
        }

      if (stack_level)
        {
          /* Ensure stack adjust isn't done by emit_jump, as this
             would clobber the stack pointer.  This one should be
             deleted as dead by flow.  */
          clear_pending_stack_adjust ();
          do_pending_stack_adjust ();
          emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
        }

      if (body != 0 && DECL_TOO_LATE (body))
        error ("jump to `%s' invalidly jumps into binding contour",
               IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
         of at least one goto that has no fixup.  */
      if (body != 0)
        TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */
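
/* Illustration: the classic case needing a fixup is a forward goto out
   of a contour that must restore the stack level, before the target
   label has been seen (sketch in GNU C, using a variable-length array
   so the block resets the stack on exit):

        {
          int buf[n];
          goto out;
        }
      out: ;

   At the goto, `out' is undefined, so expand_goto calls expand_fixup to
   record the jump; once `out' is defined, fixup_gotos inserts the
   stack-restore insns just before the recorded jump.  */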
static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
          || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
           && (rtl_label == loop_stack->data.loop.start_label
               || rtl_label == loop_stack->data.loop.end_label
               || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
         If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
        next_block = next_block->all;

      if (next_block)
        return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
         which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
        if (block == next_block)
          next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
        || block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
        = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
         after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
         doing the stack adjust here isn't useful.  Doing it with our
         settings at that location isn't useful either.  Let's hope
         someone does it!  */
      if (last_insn == 0)
        do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
         NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
         this point.  The notes will encapsulate any and all fixup
         code which we might later insert at this point in the insn
         stream.  Also, the BLOCK node will be the parent (i.e. the
         `SUPERBLOCK') of any other BLOCK nodes which we might create
         later on when we are expanding the fixup code.

         Note that optimization passes (including expand_end_loop)
         might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
         as a placeholder.  */

      {
        register rtx original_before_jump
          = last_insn ? last_insn : get_last_insn ();
        rtx start;

        start_sequence ();
        pushlevel (0);
        start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
        fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
        last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
        fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
        end_sequence ();
        emit_insns_after (start, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
        = ((block->data.block.outer_cleanups
            || block->data.block.cleanups)
           ? tree_cons (NULL_TREE, block->data.block.cleanups,
                        block->data.block.outer_cleanups)
           : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev != 0)
            prev->next = f->next;
        }
      /* Has this fixup's target label been defined?
         If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
        {
          register rtx cleanup_insns;

          /* Get the first non-label after the label
             this goto jumps to.  If that's before this scope begins,
             we don't have a jump into the scope.  */
          rtx after_label = f->target_rtl;
          while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
            after_label = NEXT_INSN (after_label);

          /* If this fixup jumped into this contour from before the beginning
             of this contour, report an error.  */
          /* ??? Bug: this does not detect jumping in through intermediate
             blocks that have stack levels or cleanups.
             It detects only a problem with the innermost block
             around the label.  */
          if (f->target != 0
              && (dont_jump_in || stack_level || cleanup_list)
              /* If AFTER_LABEL is 0, it means the jump goes to the end
                 of the rtl, which means it jumps into this scope.  */
              && (after_label == 0
                  || INSN_UID (first_insn) < INSN_UID (after_label))
              && INSN_UID (first_insn) > INSN_UID (f->before_jump)
              && ! DECL_ERROR_ISSUED (f->target))
            {
              error_with_decl (f->target,
                               "label `%s' used before containing binding contour");
              /* Prevent multiple errors for one label.  */
              DECL_ERROR_ISSUED (f->target) = 1;
            }

          /* We will expand the cleanups into a sequence of their own and
             then later on we will attach this new sequence to the insn
             stream just ahead of the actual jump insn.  */

          start_sequence ();

          /* Temporarily restore the lexical context where we will
             logically be inserting the fixup code.  We do this for the
             sake of getting the debugging information right.  */

          pushlevel (0);
          set_block (f->context);

          /* Expand the cleanups for blocks this jump exits.  */
          if (f->cleanup_list_list)
            {
              tree lists;
              for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
                /* Marked elements correspond to blocks that have been closed.
                   Do their cleanups.  */
                if (TREE_ADDRESSABLE (lists)
                    && TREE_VALUE (lists) != 0)
                  {
                    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
                    /* Pop any pushes done in the cleanups,
                       in case function is about to return.  */
                    do_pending_stack_adjust ();
                  }
            }

          /* Restore stack level for the biggest contour that this
             jump jumps out of.  */
          if (f->stack_level)
            emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

          /* Finish up the sequence containing the insns which implement the
             necessary cleanups, and then attach that whole sequence to the
             insn stream just ahead of the actual jump insn.  Attaching it
             at that point insures that any cleanups which are in fact
             implicit C++ object destructions (which must be executed upon
             leaving the block) appear (to the debugger) to be taking place
             in an area of the generated code where the object(s) being
             destructed are still "in scope".  */

          cleanup_insns = get_insns ();
          poplevel (1, 0, 0);

          end_sequence ();
          emit_insns_after (cleanup_insns, f->before_jump);

          f->before_jump = 0;
        }
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
        && PREV_INSN (f->target_rtl) == 0
        /* Label has still not appeared.  If we are exiting a block with
           a stack level to restore, that started before the fixup,
           mark this stack level as needing restoration
           when the fixup is later finalized.  */
        && thisblock != 0
        /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
           means the label is undefined.  That's erroneous, but possible.  */
        && (thisblock->data.block.block_start_count
            <= f->block_start_count))
      {
        tree lists = f->cleanup_list_list;
        rtx cleanup_insns;

        for (; lists; lists = TREE_CHAIN (lists))
          /* If the following elt. corresponds to our containing block
             then the elt. must be for this block.  */
          if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
            {
              start_sequence ();
              pushlevel (0);
              set_block (f->context);
              expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
              cleanup_insns = get_insns ();
              poplevel (1, 0, 0);
              end_sequence ();
              if (cleanup_insns != 0)
                f->before_jump
                  = emit_insns_after (cleanup_insns, f->before_jump);

              f->cleanup_list_list = TREE_CHAIN (lists);
            }

        if (stack_level)
          f->stack_level = stack_level;
      }
}
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
                                TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
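
/* Illustration: for the GNU C statement

        asm volatile ("addl %1,%0" : "=r" (x) : "g" (y) : "cc");

   the front end would call this function with STRING == "addl %1,%0",
   OUTPUTS == a one-element list with TREE_PURPOSE "=r" and TREE_VALUE x,
   INPUTS == a list with TREE_PURPOSE "g" and TREE_VALUE y, CLOBBERS ==
   a list naming "cc", and VOL == 1.  (A sketch of the mapping only, not
   a description of any particular front end's exact call site.)  */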
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
        ++nclobbers;
      else if (i == -2)
        error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  /* Check that the number of alternatives is constant across all
     operands.  */
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
        {
          error ("too many alternatives in `asm'");
          return;
        }

      tmp = outputs;
      while (tmp)
        {
          char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));
          if (n_occurrences (',', constraint) != nalternatives)
            {
              error ("operand constraints for `asm' differ in number of alternatives");
              return;
            }
          if (TREE_CHAIN (tmp))
            tmp = TREE_CHAIN (tmp);
          else
            tmp = next, next = 0;
        }
    }

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      char *constraint;
      char *p;
      int c_len;
      int j;
      int is_inout = 0;
      int allows_reg = 0;
      int allows_mem = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
        return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
         if it allows any register.  Be liberal on the latter test, since
         the worst that happens if we get it wrong is we issue an error
         message.  */

      c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));

      /* Allow the `=' or `+' to not be at the beginning of the string,
         since it wasn't explicitly documented that way, and there is a
         large body of code that puts it last.  Swap the character to
         the front, so as not to uglify any place else.  */
      switch (c_len)
        {
        default:
          if ((p = strchr (constraint, '=')) != NULL)
            break;
          if ((p = strchr (constraint, '+')) != NULL)
            break;
        case 0:
          error ("output operand constraint lacks `='");
          return;
        }

      if (p != constraint)
        {
          j = *p;
          bcopy (constraint, constraint+1, p-constraint);
          *constraint = j;

          warning ("output constraint `%c' for operand %d is not at the beginning", j, i);
        }

      is_inout = constraint[0] == '+';
      /* Replace '+' with '='.  */
      constraint[0] = '=';
      /* Make sure we can specify the matching operand.  */
      if (is_inout && i > 9)
        {
          error ("output operand constraint %d contains `+'", i);
          return;
        }

      for (j = 1; j < c_len; j++)
        switch (constraint[j])
          {
          case '+':
          case '=':
            error ("operand constraint contains '+' or '=' at illegal position.");
            return;

          case '%':
            if (i + 1 == ninputs + noutputs)
              {
                error ("`%%' constraint used with last operand");
                return;
              }
            break;

          case '?':  case '!':  case '*':  case '&':
          case 'E':  case 'F':  case 'G':  case 'H':
          case 's':  case 'i':  case 'n':
          case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
          case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
          case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
            break;

          case '0':  case '1':  case '2':  case '3':  case '4':
          case '5':  case '6':  case '7':  case '8':  case '9':
            error ("matching constraint not valid in output operand");
            break;

          case 'V':  case 'm':  case 'o':
            allows_mem = 1;
            break;

          case '<':  case '>':
            /* ??? Before flow, auto inc/dec insns are not supposed to exist,
               excepting those that expand_call created.  So match memory
               and hope.  */
            allows_mem = 1;
            break;

          case 'g':  case 'X':
            allows_reg = 1;
            allows_mem = 1;
            break;

          case 'p':  case 'r':
          default:
            allows_reg = 1;
            break;
          }

      /* If an output operand is not a decl or indirect ref and our constraint
         allows a register, make a temporary to act as an intermediate.
         Make the asm insn write into that, then our caller will copy it to
         the real output operand.  Likewise for promoted variables.  */

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
           && allows_mem)
          || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
              && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
              && ! (GET_CODE (DECL_RTL (val)) == REG
                    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
          || ! allows_reg
          || is_inout)
        {
          if (! allows_reg)
            mark_addressable (TREE_VALUE (tail));

          output_rtx[i]
            = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
                           EXPAND_MEMORY_USE_WO);

          if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
            error ("output number %d not directly addressable", i);
          if (! allows_mem && GET_CODE (output_rtx[i]) == MEM)
            {
              real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
              output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
              if (is_inout)
                emit_move_insn (output_rtx[i], real_output_rtx[i]);
            }
        }
      else
        {
          output_rtx[i] = assign_temp (type, 0, 0, 0);
          TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
        }

      if (is_inout)
        {
          inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
          inout_opnum[ninout++] = i;
        }
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }
  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS (VOIDmode,
                               TREE_STRING_POINTER (string), "", 0, argvec,
                               constraints, filename, line);

  MEM_VOLATILE_P (body) = vol;
  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0, allows_mem = 0;
      char *constraint, *orig_constraint;
      int c_len;
      rtx op;

      /* If there's an erroneous arg, emit no insn,
         because the ASM_INPUT would get VOIDmode
         and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
        return;

      /* ??? Can this happen, and does the error message make any sense? */
      if (TREE_PURPOSE (tail) == NULL_TREE)
        {
          error ("hard register `%s' listed as input operand to `asm'",
                 TREE_STRING_POINTER (TREE_VALUE (tail)) );
          return;
        }

      c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
      orig_constraint = constraint;

      /* Make sure constraint has neither `=', `+', nor '&'.  */

      for (j = 0; j < c_len; j++)
        switch (constraint[j])
          {
          case '+':  case '=':  case '&':
            if (constraint == orig_constraint)
              {
                error ("input operand constraint contains `%c'",
                       constraint[j]);
                return;
              }
            break;

          case '%':
            if (constraint == orig_constraint
                && i + 1 == ninputs - ninout)
              {
                error ("`%%' constraint used with last operand");
                return;
              }
            break;

          case 'V':  case 'm':  case 'o':
            allows_mem = 1;
            break;

          case '<':  case '>':
          case '?':  case '!':  case '*':
          case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
          case 's':  case 'i':  case 'n':
          case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
          case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
          case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
            break;

            /* Whether or not a numeric constraint allows a register is
               decided by the matching constraint, and so there is no need
               to do anything special with them.  We must handle them in
               the default case, so that we don't unnecessarily force
               operands to memory.  */
          case '0':  case '1':  case '2':  case '3':  case '4':
          case '5':  case '6':  case '7':  case '8':  case '9':
            if (constraint[j] >= '0' + noutputs)
              {
                error
                  ("matching constraint references invalid operand number");
                return;
              }

            /* Try and find the real constraint for this dup.  */
            if ((j == 0 && c_len == 1)
                || (j == 1 && c_len == 2 && constraint[0] == '%'))
              {
                tree o = outputs;
                for (j = constraint[j] - '0'; j > 0; --j)
                  o = TREE_CHAIN (o);

                c_len = TREE_STRING_LENGTH (TREE_PURPOSE (o)) - 1;
                constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
                j = 0;
                break;
              }

            /* ... fall through ...  */

          case 'p':  case 'r':
          default:
            allows_reg = 1;
            break;

          case 'g':
            allows_reg = 1;
            allows_mem = 1;
            break;
          }

      if (! allows_reg && allows_mem)
        mark_addressable (TREE_VALUE (tail));

      op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

      if (asm_operand_ok (op, constraint) <= 0)
        {
          if (allows_reg)
            op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
          else if (!allows_mem)
            warning ("asm operand %d probably doesn't match constraints", i);
          else if (CONSTANT_P (op))
            op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                                  op);
          else if (GET_CODE (op) == REG
                   || GET_CODE (op) == SUBREG
                   || GET_CODE (op) == CONCAT)
            {
              tree type = TREE_TYPE (TREE_VALUE (tail));
              rtx memloc = assign_temp (type, 1, 1, 1);

              emit_move_insn (memloc, op);
              op = memloc;
            }
          else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
            /* We won't recognize volatile memory as an acceptable
               memory_operand at this point.  Ignore it.  */
            ;
          else if (queued_subexp_p (op))
            ;
          else
            /* ??? Leave this only until we have experience with what
               happens in combine and elsewhere when constraints are
               not satisfied.  */
            warning ("asm operand %d probably doesn't match constraints", i);
        }
      XVECEXP (body, 3, i) = op;

      XVECEXP (body, 4, i)      /* constraints */
        = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                             orig_constraint);
      i++;
    }
  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs - ninout; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      static char match[9+1][2]
        = {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
      int j = inout_opnum[i];

      XVECEXP (body, 3, ninputs - ninout + i)      /* argvec */
        = output_rtx[j];
      XVECEXP (body, 4, ninputs - ninout + i)      /* constraints */
        = gen_rtx_ASM_INPUT (inout_mode[j], match[j]);
    }

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
                               ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
        {
          XVECEXP (body, 0, i)
            = gen_rtx_SET (VOIDmode,
                           output_rtx[i],
                           gen_rtx_ASM_OPERANDS (VOIDmode,
                                                 TREE_STRING_POINTER (string),
                                                 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
                                                 i, argvec, constraints,
                                                 filename, line));
          MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
        }

      /* If there are no outputs (but there are some clobbers)
         store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
        XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
        {
          char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
          int j = decode_reg_name (regname);

          if (j < 0)
            {
              if (j == -3)      /* `cc', which is not a register */
                continue;

              if (j == -4)      /* `memory', don't cache memory across asm */
                {
                  XVECEXP (body, 0, i++)
                    = gen_rtx_CLOBBER (VOIDmode,
                                       gen_rtx_MEM (BLKmode,
                                                    gen_rtx_SCRATCH (VOIDmode)));
                  continue;
                }

              /* Ignore unknown register, error already signaled.  */
              continue;
            }

          /* Use QImode since that's guaranteed to clobber just one reg.  */
          XVECEXP (body, 0, i++)
            = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
        }

      insn = emit_insn (body);
    }

  /* For any outputs that needed reloading into registers, spill them
     back to where they belong.  */
  for (i = 0; i < noutputs; ++i)
    if (real_output_rtx[i])
      emit_move_insn (real_output_rtx[i], output_rtx[i]);

  free_temp_slots ();
}
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) construct.  */
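
/* Illustration: in the GNU C statement-expression

        y = ({ int tmp = f (); tmp + 1; });

   each statement inside the braces comes through here with
   expr_stmts_for_value nonzero; the value of the last one (`tmp + 1')
   is kept in last_expr_value for expand_end_stmt_expr (a sketch of the
   intended flow).  */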
void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
          && !(TREE_CODE (exp) == CONVERT_EXPR
               && TREE_TYPE (exp) == void_type_node))
        warning_with_file_and_line (emit_filename, emit_lineno,
                                    "statement with no effect");
      else if (warn_unused)
        warn_if_unused_value (exp);
    }

  /* If EXP is of function type and we are expanding statements for
     value, convert it to pointer-to-function.  */
  if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
    exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);

  last_expr_type = TREE_TYPE (exp);
  last_expr_value = expand_expr (exp,
                                 (expr_stmts_for_value
                                  ? NULL_RTX : const0_rtx),
                                 VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
        ;
      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
        copy_to_reg (last_expr_value);
      else
        {
          rtx lab = gen_label_rtx ();

          /* Compare the value with itself to reference it.  */
          emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
                                   expand_expr (TYPE_SIZE (last_expr_type),
                                                NULL_RTX, VOIDmode, 0),
                                   BLKmode, 0,
                                   TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
                                   lab);
          emit_label (lab);
        }
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */
1752 int
1753 warn_if_unused_value (exp)
1754 tree exp;
1756 if (TREE_USED (exp))
1757 return 0;
1759 switch (TREE_CODE (exp))
1761 case PREINCREMENT_EXPR:
1762 case POSTINCREMENT_EXPR:
1763 case PREDECREMENT_EXPR:
1764 case POSTDECREMENT_EXPR:
1765 case MODIFY_EXPR:
1766 case INIT_EXPR:
1767 case TARGET_EXPR:
1768 case CALL_EXPR:
1769 case METHOD_CALL_EXPR:
1770 case RTL_EXPR:
1771 case TRY_CATCH_EXPR:
1772 case WITH_CLEANUP_EXPR:
1773 case EXIT_EXPR:
1774 /* We don't warn about COND_EXPR because it may be a useful
1775 construct if either arm contains a side effect. */
1776 case COND_EXPR:
1777 return 0;
1779 case BIND_EXPR:
1780 /* For a binding, warn if no side effect within it. */
1781 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1783 case SAVE_EXPR:
1784 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1786 case TRUTH_ORIF_EXPR:
1787 case TRUTH_ANDIF_EXPR:
1788 /* In && or ||, warn if 2nd operand has no side effect. */
1789 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1791 case COMPOUND_EXPR:
1792 if (TREE_NO_UNUSED_WARNING (exp))
1793 return 0;
1794 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1795 return 1;
1796 /* Let people do `(foo (), 0)' without a warning. */
1797 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1798 return 0;
1799 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1801 case NOP_EXPR:
1802 case CONVERT_EXPR:
1803 case NON_LVALUE_EXPR:
1804 /* Don't warn about values cast to void. */
1805 if (TREE_TYPE (exp) == void_type_node)
1806 return 0;
1807 /* Don't warn about conversions not explicit in the user's program. */
1808 if (TREE_NO_UNUSED_WARNING (exp))
1809 return 0;
1810 /* Assignment to a cast usually results in a cast of a modify.
1811 Don't complain about that. There can be an arbitrary number of
1812 casts before the modify, so we must loop until we find the first
1813 non-cast expression and then test to see if that is a modify. */
1815 tree tem = TREE_OPERAND (exp, 0);
1817 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1818 tem = TREE_OPERAND (tem, 0);
1820 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1821 || TREE_CODE (tem) == CALL_EXPR)
1822 return 0;
1824 goto warn;
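/* For example, `(char) (x = y);' strips down to the MODIFY_EXPR and
   is not warned about, whereas `(char) (x + y);' finds no assignment
   or call underneath and reaches the `warn' label below.  */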
1826 case INDIRECT_REF:
1827 /* Don't warn about automatic dereferencing of references, since
1828 the user cannot control it. */
1829 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1830 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1831 /* ... fall through ... */
1833 default:
1834 /* Referencing a volatile value is a side effect, so don't warn. */
1835 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1836 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1837 && TREE_THIS_VOLATILE (exp))
1838 return 0;
1839 warn:
1840 warning_with_file_and_line (emit_filename, emit_lineno,
1841 "value computed is not used");
1842 return 1;
1846 /* Clear out the memory of the last expression evaluated. */
1848 void
1849 clear_last_expr ()
1851 last_expr_type = 0;
1854 /* Begin a statement which will return a value.
1855 Return the RTL_EXPR for this statement expr.
1856 The caller must save that value and pass it to expand_end_stmt_expr. */
1858 tree
1859 expand_start_stmt_expr ()
1861 int momentary;
1862 tree t;
1864 /* Make the RTL_EXPR node temporary, not momentary,
1865 so that rtl_expr_chain doesn't become garbage. */
1866 momentary = suspend_momentary ();
1867 t = make_node (RTL_EXPR);
1868 resume_momentary (momentary);
1869 do_pending_stack_adjust ();
1870 start_sequence_for_rtl_expr (t);
1871 NO_DEFER_POP;
1872 expr_stmts_for_value++;
1873 return t;
1876 /* Restore the previous state at the end of a statement that returns a value.
1877 Returns a tree node representing the statement's value and the
1878 insns to compute the value.
1880 The nodes of that expression have been freed by now, so we cannot use them.
1881 But we don't want to do that anyway; the expression has already been
1882 evaluated and now we just want to use the value. So generate a RTL_EXPR
1883 with the proper type and RTL value.
1885 If the last substatement was not an expression,
1886 return something with type `void'. */
1888 tree
1889 expand_end_stmt_expr (t)
1890 tree t;
1892 OK_DEFER_POP;
1894 if (last_expr_type == 0)
1896 last_expr_type = void_type_node;
1897 last_expr_value = const0_rtx;
1899 else if (last_expr_value == 0)
1900 /* There are some cases where this can happen, such as when the
1901 statement is of void type. */
1902 last_expr_value = const0_rtx;
1903 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1904 /* Remove any possible QUEUED. */
1905 last_expr_value = protect_from_queue (last_expr_value, 0);
1907 emit_queue ();
1909 TREE_TYPE (t) = last_expr_type;
1910 RTL_EXPR_RTL (t) = last_expr_value;
1911 RTL_EXPR_SEQUENCE (t) = get_insns ();
1913 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1915 end_sequence ();
1917 /* Don't consider deleting this expr or containing exprs at tree level. */
1918 TREE_SIDE_EFFECTS (t) = 1;
1919 /* Propagate volatility of the actual RTL expr. */
1920 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1922 last_expr_type = 0;
1923 expr_stmts_for_value--;
1925 return t;
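/* Assumed call sequence, for orientation (not quoted from a front
   end): to expand the GNU C statement expression
   `({ foo (); bar (); })' one would do roughly

	t = expand_start_stmt_expr ();
	expand_expr_stmt (<call to foo>);
	expand_expr_stmt (<call to bar>);
	t = expand_end_stmt_expr (t);

   so the construct's value is that of `bar ()', taken from
   last_expr_value.  */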
1928 /* Generate RTL for the start of an if-then. COND is the expression
1929 whose truth should be tested.
1931 If EXITFLAG is nonzero, this conditional is visible to
1932 `exit_something'. */
1934 void
1935 expand_start_cond (cond, exitflag)
1936 tree cond;
1937 int exitflag;
1939 struct nesting *thiscond = ALLOC_NESTING ();
1941 /* Make an entry on cond_stack for the cond we are entering. */
1943 thiscond->next = cond_stack;
1944 thiscond->all = nesting_stack;
1945 thiscond->depth = ++nesting_depth;
1946 thiscond->data.cond.next_label = gen_label_rtx ();
1947 /* Before we encounter an `else', we don't need a separate exit label
1948 unless there are supposed to be exit statements
1949 to exit this conditional. */
1950 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1951 thiscond->data.cond.endif_label = thiscond->exit_label;
1952 cond_stack = thiscond;
1953 nesting_stack = thiscond;
1955 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1958 /* Generate RTL between the then-clause and the elseif-clause
1959 of an if-then-elseif-.... */
1961 void
1962 expand_start_elseif (cond)
1963 tree cond;
1965 if (cond_stack->data.cond.endif_label == 0)
1966 cond_stack->data.cond.endif_label = gen_label_rtx ();
1967 emit_jump (cond_stack->data.cond.endif_label);
1968 emit_label (cond_stack->data.cond.next_label);
1969 cond_stack->data.cond.next_label = gen_label_rtx ();
1970 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1973 /* Generate RTL between the then-clause and the else-clause
1974 of an if-then-else. */
1976 void
1977 expand_start_else ()
1979 if (cond_stack->data.cond.endif_label == 0)
1980 cond_stack->data.cond.endif_label = gen_label_rtx ();
1982 emit_jump (cond_stack->data.cond.endif_label);
1983 emit_label (cond_stack->data.cond.next_label);
1984 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1987 /* After calling expand_start_else, turn this "else" into an "else if"
1988 by providing another condition. */
1990 void
1991 expand_elseif (cond)
1992 tree cond;
1994 cond_stack->data.cond.next_label = gen_label_rtx ();
1995 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1998 /* Generate RTL for the end of an if-then.
1999 Pop the record for it off of cond_stack. */
2001 void
2002 expand_end_cond ()
2004 struct nesting *thiscond = cond_stack;
2006 do_pending_stack_adjust ();
2007 if (thiscond->data.cond.next_label)
2008 emit_label (thiscond->data.cond.next_label);
2009 if (thiscond->data.cond.endif_label)
2010 emit_label (thiscond->data.cond.endif_label);
2012 POPSTACK (cond_stack);
2013 last_expr_type = 0;
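/* Illustrative sequence (an assumption about front-end usage): for
   `if (a) s1; else if (b) s2; else s3;' the expected calls are

	expand_start_cond (a, 0);   ...expand s1...
	expand_start_elseif (b);    ...expand s2...
	expand_start_else ();       ...expand s3...
	expand_end_cond ();

   with next_label and endif_label providing the jump targets.  */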
2018 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2019 loop should be exited by `exit_something'. This is a loop for which
2020 `expand_continue' will jump to the top of the loop.
2022 Make an entry on loop_stack to record the labels associated with
2023 this loop. */
2025 struct nesting *
2026 expand_start_loop (exit_flag)
2027 int exit_flag;
2029 register struct nesting *thisloop = ALLOC_NESTING ();
2031 /* Make an entry on loop_stack for the loop we are entering. */
2033 thisloop->next = loop_stack;
2034 thisloop->all = nesting_stack;
2035 thisloop->depth = ++nesting_depth;
2036 thisloop->data.loop.start_label = gen_label_rtx ();
2037 thisloop->data.loop.end_label = gen_label_rtx ();
2038 thisloop->data.loop.alt_end_label = 0;
2039 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2040 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2041 loop_stack = thisloop;
2042 nesting_stack = thisloop;
2044 do_pending_stack_adjust ();
2045 emit_queue ();
2046 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2047 emit_label (thisloop->data.loop.start_label);
2049 return thisloop;
2052 /* Like expand_start_loop but for a loop where the continuation point
2053 (for expand_continue_loop) will be specified explicitly. */
2055 struct nesting *
2056 expand_start_loop_continue_elsewhere (exit_flag)
2057 int exit_flag;
2059 struct nesting *thisloop = expand_start_loop (exit_flag);
2060 loop_stack->data.loop.continue_label = gen_label_rtx ();
2061 return thisloop;
2064 /* Specify the continuation point for a loop started with
2065 expand_start_loop_continue_elsewhere.
2066 Use this at the point in the code to which a continue statement
2067 should jump. */
2069 void
2070 expand_loop_continue_here ()
2072 do_pending_stack_adjust ();
2073 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2074 emit_label (loop_stack->data.loop.continue_label);
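/* Sketch of assumed usage: for `for (init; cond; incr) body' a front
   end can call expand_start_loop_continue_elsewhere, expand BODY,
   call expand_loop_continue_here just before expanding INCR, and
   finish with expand_end_loop, so that `continue' jumps to INCR
   rather than to the top of the loop.  */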
2077 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2078 Pop the block off of loop_stack. */
2080 void
2081 expand_end_loop ()
2083 rtx start_label = loop_stack->data.loop.start_label;
2084 rtx insn = get_last_insn ();
2085 int needs_end_jump = 1;
2087 /* Mark the continue-point at the top of the loop if none elsewhere. */
2088 if (start_label == loop_stack->data.loop.continue_label)
2089 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2091 do_pending_stack_adjust ();
2093 /* If optimizing, perhaps reorder the loop.
2094 First, try to use a condjump near the end.
2095 expand_exit_loop_if_false ends loops with unconditional jumps,
2096 like this:
2098 if (test) goto label;
2099 optional: cleanup
2100 goto loop_stack->data.loop.end_label
2101 barrier
2102 label:
2104 If we find such a pattern, we can end the loop earlier. */
2106 if (optimize
2107 && GET_CODE (insn) == CODE_LABEL
2108 && LABEL_NAME (insn) == NULL
2109 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2111 rtx label = insn;
2112 rtx jump = PREV_INSN (PREV_INSN (label));
2114 if (GET_CODE (jump) == JUMP_INSN
2115 && GET_CODE (PATTERN (jump)) == SET
2116 && SET_DEST (PATTERN (jump)) == pc_rtx
2117 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2118 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2119 == loop_stack->data.loop.end_label))
2121 rtx prev;
2123 /* The test might be complex and reference LABEL multiple times,
2124 like the loop in loop_iterations to set vtop. To handle this,
2125 we move LABEL. */
2126 insn = PREV_INSN (label);
2127 reorder_insns (label, label, start_label);
2129 for (prev = PREV_INSN (jump); ; prev = PREV_INSN (prev))
2131 /* We ignore line number notes, but if we see any other note,
2132 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2133 NOTE_INSN_LOOP_*, we disable this optimization. */
2134 if (GET_CODE (prev) == NOTE)
2136 if (NOTE_LINE_NUMBER (prev) < 0)
2137 break;
2138 continue;
2140 if (GET_CODE (prev) == CODE_LABEL)
2141 break;
2142 if (GET_CODE (prev) == JUMP_INSN)
2144 if (GET_CODE (PATTERN (prev)) == SET
2145 && SET_DEST (PATTERN (prev)) == pc_rtx
2146 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2147 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2148 == LABEL_REF)
2149 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2151 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2152 = start_label;
2153 emit_note_after (NOTE_INSN_LOOP_END, prev);
2154 needs_end_jump = 0;
2156 break;
2162 /* If the loop starts with a loop exit, roll that to the end where
2163 it will optimize together with the jump back.
2165 We look for the conditional branch to the exit, except that once
2166 we find such a branch, we don't look past 30 instructions.
2168 In more detail, if the loop presently looks like this (in pseudo-C):
2170 start_label:
2171 if (test) goto end_label;
2172 body;
2173 goto start_label;
2174 end_label:
2176 transform it to look like:
2178 goto start_label;
2179 newstart_label:
2180 body;
2181 start_label:
2182 if (test) goto end_label;
2183 goto newstart_label;
2184 end_label:
2186 Here, the `test' may actually consist of some reasonably complex
2187 code, terminating in a test. */
2189 if (optimize
2190 && needs_end_jump
2191 &&
2192 ! (GET_CODE (insn) == JUMP_INSN
2193 && GET_CODE (PATTERN (insn)) == SET
2194 && SET_DEST (PATTERN (insn)) == pc_rtx
2195 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2197 int eh_regions = 0;
2198 int num_insns = 0;
2199 rtx last_test_insn = NULL_RTX;
2201 /* Scan insns from the top of the loop looking for a qualified
2202 conditional exit. */
2203 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2204 insn = NEXT_INSN (insn))
2206 if (GET_CODE (insn) == NOTE)
2208 if (optimize < 2
2209 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2210 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2211 /* The code that actually moves the exit test will
2212 carefully leave BLOCK notes in their original
2213 location. That means, however, that we can't debug
2214 the exit test itself. So, we refuse to move code
2215 containing BLOCK notes at low optimization levels. */
2216 break;
2218 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2219 ++eh_regions;
2220 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2222 --eh_regions;
2223 if (eh_regions < 0)
2224 /* We've come to the end of an EH region, but
2225 never saw the beginning of that region. That
2226 means that an EH region begins before the top
2227 of the loop, and ends in the middle of it. The
2228 existence of such a situation violates a basic
2229 assumption in this code, since that would imply
2230 that even when EH_REGIONS is zero, we might
2231 move code out of an exception region. */
2232 abort ();
2235 /* We must not walk into a nested loop. */
2236 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2237 break;
2239 /* We already know this INSN is a NOTE, so there's no
2240 point in looking at it to see if it's a JUMP. */
2241 continue;
2244 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2245 num_insns++;
2247 if (last_test_insn && num_insns > 30)
2248 break;
2250 if (eh_regions > 0)
2251 /* We don't want to move a partial EH region. Consider:
2253 while ( ( { try {
2254 if (cond ()) 0;
2255 else {
2256 bar();
2259 } catch (...) {
2261 } )) {
2262 body;
2265 This isn't legal C++, but here's what it's supposed to
2266 mean: if cond() is true, stop looping. Otherwise,
2267 call bar, and keep looping. In addition, if cond
2268 throws an exception, catch it and keep looping. Such
2269 constructs are certainly legal in LISP.
2271 We should not move the `if (cond()) 0' test since then
2272 the EH-region for the try-block would be broken up.
2273 (In this case we would move the EH_BEG note for the `try'
2274 and `if cond()' but not the call to bar() or the
2275 EH_END note.)
2277 So we don't look for tests within an EH region. */
2278 continue;
2280 if (GET_CODE (insn) == JUMP_INSN
2281 && GET_CODE (PATTERN (insn)) == SET
2282 && SET_DEST (PATTERN (insn)) == pc_rtx)
2284 /* This is indeed a jump. */
2285 rtx dest1 = NULL_RTX;
2286 rtx dest2 = NULL_RTX;
2287 rtx potential_last_test;
2288 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2290 /* A conditional jump. */
2291 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2292 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2293 potential_last_test = insn;
2295 else
2297 /* An unconditional jump. */
2298 dest1 = SET_SRC (PATTERN (insn));
2299 /* Include the BARRIER after the JUMP. */
2300 potential_last_test = NEXT_INSN (insn);
2303 do {
2304 if (dest1 && GET_CODE (dest1) == LABEL_REF
2305 && ((XEXP (dest1, 0)
2306 == loop_stack->data.loop.alt_end_label)
2307 || (XEXP (dest1, 0)
2308 == loop_stack->data.loop.end_label)))
2310 last_test_insn = potential_last_test;
2311 break;
2314 /* If this was a conditional jump, there may be
2315 another label at which we should look. */
2316 dest1 = dest2;
2317 dest2 = NULL_RTX;
2318 } while (dest1);
2322 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2324 /* We found one. Move everything from there up
2325 to the end of the loop, and add a jump into the loop
2326 to jump to there. */
2327 register rtx newstart_label = gen_label_rtx ();
2328 register rtx start_move = start_label;
2329 rtx next_insn;
2331 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2332 then we want to move this note also. */
2333 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2334 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2335 == NOTE_INSN_LOOP_CONT))
2336 start_move = PREV_INSN (start_move);
2338 emit_label_after (newstart_label, PREV_INSN (start_move));
2340 /* Actually move the insns. Start at the beginning, and
2341 keep copying insns until we've copied the
2342 last_test_insn. */
2343 for (insn = start_move; insn; insn = next_insn)
2345 /* Figure out which insn comes after this one. We have
2346 to do this before we move INSN. */
2347 if (insn == last_test_insn)
2348 /* We've moved all the insns. */
2349 next_insn = NULL_RTX;
2350 else
2351 next_insn = NEXT_INSN (insn);
2353 if (GET_CODE (insn) == NOTE
2354 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2355 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2356 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2357 NOTE_INSN_BLOCK_ENDs because the correct generation
2358 of debugging information depends on these appearing
2359 in the same order in the RTL and in the tree
2360 structure, where they are represented as BLOCKs.
2361 So, we don't move block notes. Of course, moving
2362 the code inside the block is likely to make it
2363 impossible to debug the instructions in the exit
2364 test, but such is the price of optimization. */
2365 continue;
2367 /* Move the INSN. */
2368 reorder_insns (insn, insn, get_last_insn ());
2371 emit_jump_insn_after (gen_jump (start_label),
2372 PREV_INSN (newstart_label));
2373 emit_barrier_after (PREV_INSN (newstart_label));
2374 start_label = newstart_label;
2378 if (needs_end_jump)
2380 emit_jump (start_label);
2381 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2383 emit_label (loop_stack->data.loop.end_label);
2385 POPSTACK (loop_stack);
2387 last_expr_type = 0;
2390 /* Generate a jump to the current loop's continue-point.
2391 This is usually the top of the loop, but may be specified
2392 explicitly elsewhere. If not currently inside a loop,
2393 return 0 and do nothing; caller will print an error message. */
2395 int
2396 expand_continue_loop (whichloop)
2397 struct nesting *whichloop;
2399 last_expr_type = 0;
2400 if (whichloop == 0)
2401 whichloop = loop_stack;
2402 if (whichloop == 0)
2403 return 0;
2404 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2405 NULL_RTX);
2406 return 1;
2409 /* Generate a jump to exit the current loop. If not currently inside a loop,
2410 return 0 and do nothing; caller will print an error message. */
2412 int
2413 expand_exit_loop (whichloop)
2414 struct nesting *whichloop;
2416 last_expr_type = 0;
2417 if (whichloop == 0)
2418 whichloop = loop_stack;
2419 if (whichloop == 0)
2420 return 0;
2421 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2422 return 1;
2425 /* Generate a conditional jump to exit the current loop if COND
2426 evaluates to zero. If not currently inside a loop,
2427 return 0 and do nothing; caller will print an error message. */
2429 int
2430 expand_exit_loop_if_false (whichloop, cond)
2431 struct nesting *whichloop;
2432 tree cond;
2434 rtx label = gen_label_rtx ();
2435 rtx last_insn;
2436 last_expr_type = 0;
2438 if (whichloop == 0)
2439 whichloop = loop_stack;
2440 if (whichloop == 0)
2441 return 0;
2442 /* In order to handle fixups, we actually create a conditional jump
2443 around an unconditional branch to exit the loop. If fixups are
2444 necessary, they go before the unconditional branch. */
2447 do_jump (cond, NULL_RTX, label);
2448 last_insn = get_last_insn ();
2449 if (GET_CODE (last_insn) == CODE_LABEL)
2450 whichloop->data.loop.alt_end_label = last_insn;
2451 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2452 NULL_RTX);
2453 emit_label (label);
2455 return 1;
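/* So the shape emitted above is roughly

	if (cond) goto label;
	<fixups, if any>
	goto end_label;
      label:

   the conditional jump wrapping the unconditional exit branch.  */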
2458 /* Return nonzero if the loop nest is empty. Else return zero. */
2460 int
2461 stmt_loop_nest_empty ()
2463 return (loop_stack == NULL);
2466 /* Return non-zero if we should preserve sub-expressions as separate
2467 pseudos. We never do so if we aren't optimizing. We always do so
2468 if -fexpensive-optimizations.
2470 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2471 the loop may still be a small one. */
2473 int
2474 preserve_subexpressions_p ()
2476 rtx insn;
2478 if (flag_expensive_optimizations)
2479 return 1;
2481 if (optimize == 0 || loop_stack == 0)
2482 return 0;
2484 insn = get_last_insn_anywhere ();
2486 return (insn
2487 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2488 < n_non_fixed_regs * 3));
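/* For a rough feel of the heuristic above (figures illustrative):
   with 30 non-fixed registers, subexpressions are preserved only
   while fewer than about 90 insn UIDs have been used since the
   loop's start label.  */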
2492 /* Generate a jump to exit the current loop, conditional, binding contour
2493 or case statement. Not all such constructs are visible to this function,
2494 only those started with EXIT_FLAG nonzero. Individual languages use
2495 the EXIT_FLAG parameter to control which kinds of constructs you can
2496 exit this way.
2498 If not currently inside anything that can be exited,
2499 return 0 and do nothing; caller will print an error message. */
2501 int
2502 expand_exit_something ()
2504 struct nesting *n;
2505 last_expr_type = 0;
2506 for (n = nesting_stack; n; n = n->all)
2507 if (n->exit_label != 0)
2509 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2510 return 1;
2513 return 0;
2516 /* Generate RTL to return from the current function, with no value.
2517 (That is, we do not do anything about returning any value.) */
2519 void
2520 expand_null_return ()
2522 struct nesting *block = block_stack;
2523 rtx last_insn = 0;
2525 /* Does any pending block have cleanups? */
2527 while (block && block->data.block.cleanups == 0)
2528 block = block->next;
2530 /* If yes, use a goto to return, since that runs cleanups. */
2532 expand_null_return_1 (last_insn, block != 0);
2535 /* Generate RTL to return from the current function, with value VAL. */
2537 static void
2538 expand_value_return (val)
2539 rtx val;
2541 struct nesting *block = block_stack;
2542 rtx last_insn = get_last_insn ();
2543 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2545 /* Copy the value to the return location
2546 unless it's already there. */
2548 if (return_reg != val)
2550 #ifdef PROMOTE_FUNCTION_RETURN
2551 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2552 int unsignedp = TREE_UNSIGNED (type);
2553 enum machine_mode mode
2554 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2555 &unsignedp, 1);
2557 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2558 convert_move (return_reg, val, unsignedp);
2559 else
2560 #endif
2561 emit_move_insn (return_reg, val);
2563 if (GET_CODE (return_reg) == REG
2564 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2565 emit_insn (gen_rtx_USE (VOIDmode, return_reg));
2566 /* Handle calls that return values in multiple non-contiguous locations.
2567 The Irix 6 ABI has examples of this. */
2568 else if (GET_CODE (return_reg) == PARALLEL)
2570 int i;
2572 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2574 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2576 if (GET_CODE (x) == REG
2577 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2578 emit_insn (gen_rtx_USE (VOIDmode, x));
2582 /* Does any pending block have cleanups? */
2584 while (block && block->data.block.cleanups == 0)
2585 block = block->next;
2587 /* If yes, use a goto to return, since that runs cleanups.
2588 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2590 expand_null_return_1 (last_insn, block != 0);
2593 /* Output a return with no value. If LAST_INSN is nonzero,
2594 pretend that the return takes place after LAST_INSN.
2595 If USE_GOTO is nonzero then don't use a return instruction;
2596 go to the return label instead. This causes any cleanups
2597 of pending blocks to be executed normally. */
2599 static void
2600 expand_null_return_1 (last_insn, use_goto)
2601 rtx last_insn;
2602 int use_goto;
2604 rtx end_label = cleanup_label ? cleanup_label : return_label;
2606 clear_pending_stack_adjust ();
2607 do_pending_stack_adjust ();
2608 last_expr_type = 0;
2610 /* PCC-struct return always uses an epilogue. */
2611 if (current_function_returns_pcc_struct || use_goto)
2613 if (end_label == 0)
2614 end_label = return_label = gen_label_rtx ();
2615 expand_goto_internal (NULL_TREE, end_label, last_insn);
2616 return;
2619 /* Otherwise output a simple return-insn if one is available,
2620 unless it won't do the job. */
2621 #ifdef HAVE_return
2622 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2624 emit_jump_insn (gen_return ());
2625 emit_barrier ();
2626 return;
2628 #endif
2630 /* Otherwise jump to the epilogue. */
2631 expand_goto_internal (NULL_TREE, end_label, last_insn);
2634 /* Generate RTL to evaluate the expression RETVAL and return it
2635 from the current function. */
2637 void
2638 expand_return (retval)
2639 tree retval;
2641 /* If there are any cleanups to be performed, then they will
2642 be inserted following LAST_INSN. It is desirable
2643 that the last_insn, for such purposes, should be the
2644 last insn before computing the return value. Otherwise, cleanups
2645 which call functions can clobber the return value. */
2646 /* ??? rms: I think that is erroneous, because in C++ it would
2647 run destructors on variables that might be used in the subsequent
2648 computation of the return value. */
2649 rtx last_insn = 0;
2650 register rtx val = 0;
2651 register rtx op0;
2652 tree retval_rhs;
2653 int cleanups;
2655 /* If function wants no value, give it none. */
2656 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2658 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2659 emit_queue ();
2660 expand_null_return ();
2661 return;
2664 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2665 /* This is not sufficient. We also need to watch for cleanups of the
2666 expression we are about to expand. Unfortunately, we cannot know
2667 if it has cleanups until we expand it, and we want to change how we
2668 expand it depending upon if we need cleanups. We can't win. */
2669 #if 0
2670 cleanups = any_pending_cleanups (1);
2671 #else
2672 cleanups = 1;
2673 #endif
2675 if (TREE_CODE (retval) == RESULT_DECL)
2676 retval_rhs = retval;
2677 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2678 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2679 retval_rhs = TREE_OPERAND (retval, 1);
2680 else if (TREE_TYPE (retval) == void_type_node)
2681 /* Recognize tail-recursive call to void function. */
2682 retval_rhs = retval;
2683 else
2684 retval_rhs = NULL_TREE;
2686 /* Only use `last_insn' if there are cleanups which must be run. */
2687 if (cleanups || cleanup_label != 0)
2688 last_insn = get_last_insn ();
2690 /* Distribute return down conditional expr if either of the sides
2691 may involve tail recursion (see test below). This enhances the number
2692 of tail recursions we see. Don't do this always since it can produce
2693 sub-optimal code in some cases and we distribute assignments into
2694 conditional expressions when it would help. */
2696 if (optimize && retval_rhs != 0
2697 && frame_offset == 0
2698 && TREE_CODE (retval_rhs) == COND_EXPR
2699 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2700 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2702 rtx label = gen_label_rtx ();
2703 tree expr;
2705 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2706 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2707 DECL_RESULT (current_function_decl),
2708 TREE_OPERAND (retval_rhs, 1));
2709 TREE_SIDE_EFFECTS (expr) = 1;
2710 expand_return (expr);
2711 emit_label (label);
2713 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2714 DECL_RESULT (current_function_decl),
2715 TREE_OPERAND (retval_rhs, 2));
2716 TREE_SIDE_EFFECTS (expr) = 1;
2717 expand_return (expr);
2718 return;
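/* For example (illustrative): `return p ? f (x) : g (x);' becomes,
   roughly,

	if (!p) goto label;
	return <result> = f (x);
      label:
	return <result> = g (x);

   so each arm can be considered for tail recursion on its own.  */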
2721 /* Attempt to optimize the call if it is tail recursive. */
2722 if (optimize_tail_recursion (retval_rhs, last_insn))
2723 return;
2725 #ifdef HAVE_return
2726 /* This optimization is safe if there are local cleanups
2727 because expand_null_return takes care of them.
2728 ??? I think it should also be safe when there is a cleanup label,
2729 because expand_null_return takes care of them, too.
2730 Any reason why not? */
2731 if (HAVE_return && cleanup_label == 0
2732 && ! current_function_returns_pcc_struct
2733 && BRANCH_COST <= 1)
2735 /* If this is return x == y; then generate
2736 if (x == y) return 1; else return 0;
2737 if we can do it with explicit return insns and branches are cheap,
2738 but not if we have the corresponding scc insn. */
2739 int has_scc = 0;
2740 if (retval_rhs)
2741 switch (TREE_CODE (retval_rhs))
2743 case EQ_EXPR:
2744 #ifdef HAVE_seq
2745 has_scc = HAVE_seq;
2746 #endif
2747 case NE_EXPR:
2748 #ifdef HAVE_sne
2749 has_scc = HAVE_sne;
2750 #endif
2751 case GT_EXPR:
2752 #ifdef HAVE_sgt
2753 has_scc = HAVE_sgt;
2754 #endif
2755 case GE_EXPR:
2756 #ifdef HAVE_sge
2757 has_scc = HAVE_sge;
2758 #endif
2759 case LT_EXPR:
2760 #ifdef HAVE_slt
2761 has_scc = HAVE_slt;
2762 #endif
2763 case LE_EXPR:
2764 #ifdef HAVE_sle
2765 has_scc = HAVE_sle;
2766 #endif
2767 case TRUTH_ANDIF_EXPR:
2768 case TRUTH_ORIF_EXPR:
2769 case TRUTH_AND_EXPR:
2770 case TRUTH_OR_EXPR:
2771 case TRUTH_NOT_EXPR:
2772 case TRUTH_XOR_EXPR:
2773 if (! has_scc)
2775 op0 = gen_label_rtx ();
2776 jumpifnot (retval_rhs, op0);
2777 expand_value_return (const1_rtx);
2778 emit_label (op0);
2779 expand_value_return (const0_rtx);
2780 return;
2782 break;
2784 default:
2785 break;
2788 #endif /* HAVE_return */
2790 /* If the result is an aggregate that is being returned in one (or more)
2791 registers, load the registers here. The compiler currently can't handle
2792 copying a BLKmode value into registers. We could put this code in a
2793 more general area (for use by everyone instead of just function
2794 call/return), but until this feature is generally usable it is kept here
2795 (and in expand_call). The value must go into a pseudo in case there
2796 are cleanups that will clobber the real return register. */
2798 if (retval_rhs != 0
2799 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2800 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2802 int i, bitpos, xbitpos;
2803 int big_endian_correction = 0;
2804 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2805 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2806 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),
2807 (unsigned int)BITS_PER_WORD);
2808 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2809 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2810 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2811 enum machine_mode tmpmode, result_reg_mode;
2813 /* Structures whose size is not a multiple of a word are aligned
2814 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2815 machine, this means we must skip the empty high order bytes when
2816 calculating the bit offset. */
2817 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2818 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2819 * BITS_PER_UNIT));
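/* Worked example (assuming a 32-bit big-endian target): for a 6-byte
   structure, bytes % UNITS_PER_WORD == 2, so the correction is
   32 - 2 * 8 == 16 bits, skipping the empty high-order half of the
   first destination word.  */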
2821 /* Copy the structure BITSIZE bits at a time. */
2822 for (bitpos = 0, xbitpos = big_endian_correction;
2823 bitpos < bytes * BITS_PER_UNIT;
2824 bitpos += bitsize, xbitpos += bitsize)
2826 /* We need a new destination pseudo each time xbitpos is
2827 on a word boundary and when xbitpos == big_endian_correction
2828 (the first time through). */
2829 if (xbitpos % BITS_PER_WORD == 0
2830 || xbitpos == big_endian_correction)
2832 /* Generate an appropriate register. */
2833 dst = gen_reg_rtx (word_mode);
2834 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2836 /* Clobber the destination before we move anything into it. */
2837 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
2840 /* We need a new source operand each time bitpos is on a word
2841 boundary. */
2842 if (bitpos % BITS_PER_WORD == 0)
2843 src = operand_subword_force (result_val,
2844 bitpos / BITS_PER_WORD,
2845 BLKmode);
2847 /* Use bitpos for the source extraction (left justified) and
2848 xbitpos for the destination store (right justified). */
2849 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2850 extract_bit_field (src, bitsize,
2851 bitpos % BITS_PER_WORD, 1,
2852 NULL_RTX, word_mode,
2853 word_mode,
2854 bitsize / BITS_PER_UNIT,
2855 BITS_PER_WORD),
2856 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2859 /* Find the smallest integer mode large enough to hold the
2860 entire structure and use that mode instead of BLKmode
2861 on the USE insn for the return register. */
2862 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2863 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2864 tmpmode != MAX_MACHINE_MODE;
2865 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2867 /* Have we found a large enough mode? */
2868 if (GET_MODE_SIZE (tmpmode) >= bytes)
2869 break;
2872 /* No suitable mode found. */
2873 if (tmpmode == MAX_MACHINE_MODE)
2874 abort ();
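/* E.g. (illustrative): a 3-byte structure on a typical 32-bit target
   ends the search at SImode, the narrowest MODE_INT mode whose size
   is at least 3 bytes.  */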
2876 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2878 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2879 result_reg_mode = word_mode;
2880 else
2881 result_reg_mode = tmpmode;
2882 result_reg = gen_reg_rtx (result_reg_mode);
2884 emit_queue ();
2885 for (i = 0; i < n_regs; i++)
2886 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2887 result_pseudos[i]);
2889 if (tmpmode != result_reg_mode)
2890 result_reg = gen_lowpart (tmpmode, result_reg);
2892 expand_value_return (result_reg);
2894 else if (cleanups
2895 && retval_rhs != 0
2896 && TREE_TYPE (retval_rhs) != void_type_node
2897 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2899 /* Calculate the return value into a pseudo reg. */
2900 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
2901 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2902 val = force_not_mem (val);
2903 emit_queue ();
2904 /* Return the calculated value, doing cleanups first. */
2905 expand_value_return (val);
2907 else
2909 /* No cleanups or no hard reg used;
2910 calculate value into hard return reg. */
2911 expand_expr (retval, const0_rtx, VOIDmode, 0);
2912 emit_queue ();
2913 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2917 /* Return 1 if the end of the generated RTX is not a barrier.
2918 This means code already compiled can drop through. */
2920 int
2921 drop_through_at_end_p ()
2923 rtx insn = get_last_insn ();
2924 while (insn && GET_CODE (insn) == NOTE)
2925 insn = PREV_INSN (insn);
2926 return insn && GET_CODE (insn) != BARRIER;
2929 /* Test CALL_EXPR to determine if it is a potential tail recursion call
2930 and emit code to optimize the tail recursion. LAST_INSN indicates where
2931 to place the jump to the tail recursion label. Return TRUE if the
2932 call was optimized into a goto.
2934 This is only used by expand_return, but expand_call is expected to
2935 use it soon. */
2937 int
2938 optimize_tail_recursion (call_expr, last_insn)
2939 tree call_expr;
2940 rtx last_insn;
2942 /* For tail-recursive call to current function,
2943 just jump back to the beginning.
2944 It's unsafe if any auto variable in this function
2945 has its address taken; for simplicity,
2946 require stack frame to be empty. */
2947 if (optimize && call_expr != 0
2948 && frame_offset == 0
2949 && TREE_CODE (call_expr) == CALL_EXPR
2950 && TREE_CODE (TREE_OPERAND (call_expr, 0)) == ADDR_EXPR
2951 && TREE_OPERAND (TREE_OPERAND (call_expr, 0), 0) == current_function_decl
2952 /* Finish checking validity, and if valid emit code
2953 to set the argument variables for the new call. */
2954 && tail_recursion_args (TREE_OPERAND (call_expr, 1),
2955 DECL_ARGUMENTS (current_function_decl)))
2957 if (tail_recursion_label == 0)
2959 tail_recursion_label = gen_label_rtx ();
2960 emit_label_after (tail_recursion_label,
2961 tail_recursion_reentry);
2963 emit_queue ();
2964 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2965 emit_barrier ();
2966 return 1;
2969 return 0;
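/* Illustrative case (not from the source): in

	int f (n) int n; { ... return f (n - 1); ... }

   the recursive call qualifies, so instead of a real call we store
   the new argument values and jump to tail_recursion_label --
   provided frame_offset is 0 and tail_recursion_args accepts the
   actual arguments.  */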
2972 /* Emit code to alter this function's formal parms for a tail-recursive call.
2973 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2974 FORMALS is the chain of decls of formals.
2975 Return 1 if this can be done;
2976 otherwise return 0 and do not emit any code. */
2978 static int
2979 tail_recursion_args (actuals, formals)
2980 tree actuals, formals;
2982 register tree a = actuals, f = formals;
2983 register int i;
2984 register rtx *argvec;
2986 /* Check that number and types of actuals are compatible
2987 with the formals. This is not always true in valid C code.
2988 Also check that no formal needs to be addressable
2989 and that all formals are scalars. */
2991 /* Also count the args. */
2993 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2995 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2996 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
2997 return 0;
2998 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2999 return 0;
3001 if (a != 0 || f != 0)
3002 return 0;
3004 /* Compute all the actuals. */
3006 argvec = (rtx *) alloca (i * sizeof (rtx));
3008 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3009 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3011 /* Find which actual values refer to current values of previous formals.
3012 Copy each of them now, before any formal is changed. */
3014 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3016 int copy = 0;
3017 register int j;
3018 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3019 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3020 { copy = 1; break; }
3021 if (copy)
3022 argvec[i] = copy_to_reg (argvec[i]);
3025 /* Store the values of the actuals into the formals. */
3027 for (f = formals, a = actuals, i = 0; f;
3028 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3030 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3031 emit_move_insn (DECL_RTL (f), argvec[i]);
3032 else
3033 convert_move (DECL_RTL (f), argvec[i],
3034 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3037 free_temp_slots ();
3038 return 1;
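/* The copy pass above matters for calls like `return f (y, x);'
   within f (x, y): the actual X is copied to a fresh pseudo before
   the store into formal X can clobber it (illustrative, assuming
   both formals live in registers).  */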
3041 /* Generate the RTL code for entering a binding contour.
3042 The variables are declared one by one, by calls to `expand_decl'.
3044 EXIT_FLAG is nonzero if this construct should be visible to
3045 `exit_something'. */
3047 void
3048 expand_start_bindings (exit_flag)
3049 int exit_flag;
3051 struct nesting *thisblock = ALLOC_NESTING ();
3052 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3054 /* Make an entry on block_stack for the block we are entering. */
3056 thisblock->next = block_stack;
3057 thisblock->all = nesting_stack;
3058 thisblock->depth = ++nesting_depth;
3059 thisblock->data.block.stack_level = 0;
3060 thisblock->data.block.cleanups = 0;
3061 thisblock->data.block.function_call_count = 0;
3062 thisblock->data.block.exception_region = 0;
3063 thisblock->data.block.target_temp_slot_level = target_temp_slot_level;
3065 thisblock->data.block.conditional_code = 0;
3066 thisblock->data.block.last_unconditional_cleanup = note;
3067 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3069 if (block_stack
3070 && !(block_stack->data.block.cleanups == NULL_TREE
3071 && block_stack->data.block.outer_cleanups == NULL_TREE))
3072 thisblock->data.block.outer_cleanups
3073 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3074 block_stack->data.block.outer_cleanups);
3075 else
3076 thisblock->data.block.outer_cleanups = 0;
3077 thisblock->data.block.label_chain = 0;
3078 thisblock->data.block.innermost_stack_block = stack_block_stack;
3079 thisblock->data.block.first_insn = note;
3080 thisblock->data.block.block_start_count = ++block_start_count;
3081 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3082 block_stack = thisblock;
3083 nesting_stack = thisblock;
3085 /* Make a new level for allocating stack slots. */
3086 push_temp_slots ();
3089 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3090 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3091 expand_expr are made. After we end the region, we know that all
3092 space for all temporaries that were created by TARGET_EXPRs will be
3093 destroyed and their space freed for reuse. */
3095 void
3096 expand_start_target_temps ()
3098 /* This is so that even if the result is preserved, the space
3099 allocated will be freed, as we know that it is no longer in use. */
3100 push_temp_slots ();
3102 /* Start a new binding layer that will keep track of all cleanup
3103 actions to be performed. */
3104 expand_start_bindings (0);
3106 target_temp_slot_level = temp_slot_level;
3109 void
3110 expand_end_target_temps ()
3112 expand_end_bindings (NULL_TREE, 0, 0);
3114 /* This is so that even if the result is preserved, the space
3115 allocated will be freed, as we know that it is no longer in use. */
3116 pop_temp_slots ();
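/* Assumed usage sketch: callers bracket the expansion of an
   expression that may create TARGET_EXPR temporaries, e.g.

	expand_start_target_temps ();
	expand_expr (exp, const0_rtx, VOIDmode, 0);
	expand_end_target_temps ();

   after which the slots made for EXP's temporaries are free for
   reuse.  */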
3119 /* Mark top block of block_stack as an implicit binding for an
3120 exception region. This is used to prevent infinite recursion when
3121 ending a binding with expand_end_bindings. It is only ever called
3122 by expand_eh_region_start, as that is the only way to create a
3123 block stack for an exception region. */
3125 void
3126 mark_block_as_eh_region ()
3128 block_stack->data.block.exception_region = 1;
3129 if (block_stack->next
3130 && block_stack->next->data.block.conditional_code)
3132 block_stack->data.block.conditional_code
3133 = block_stack->next->data.block.conditional_code;
3134 block_stack->data.block.last_unconditional_cleanup
3135 = block_stack->next->data.block.last_unconditional_cleanup;
3136 block_stack->data.block.cleanup_ptr
3137 = block_stack->next->data.block.cleanup_ptr;
3141 /* True if we are currently emitting insns in an area of output code
3142 that is controlled by a conditional expression. This is used by
3143 the cleanup handling code to generate conditional cleanup actions. */
3145 int
3146 conditional_context ()
3148 return block_stack && block_stack->data.block.conditional_code;
3151 /* Mark top block of block_stack as not for an implicit binding for an
3152 exception region. This is only ever done by expand_eh_region_end
3153 to let expand_end_bindings know that it is being called explicitly
3154 to end just the binding layer associated with the exception
3155 region; otherwise expand_end_bindings would try to end all
3156 implicit binding layers for exception regions, and then one
3157 normal binding layer. */
3159 void
3160 mark_block_as_not_eh_region ()
3162 block_stack->data.block.exception_region = 0;
3165 /* True if the top block of block_stack was marked as for an exception
3166 region by mark_block_as_eh_region. */
3168 int
3169 is_eh_region ()
3171 return block_stack && block_stack->data.block.exception_region;
3174 /* Given a pointer to a BLOCK node, save a pointer to the most recently
3175 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3176 BLOCK node. */
3178 void
3179 remember_end_note (block)
3180 register tree block;
3182 BLOCK_END_NOTE (block) = last_block_end_note;
3183 last_block_end_note = NULL_RTX;
3186 /* Emit a handler label for a nonlocal goto handler.
3187 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3189 static rtx
3190 expand_nl_handler_label (slot, before_insn)
3191 rtx slot, before_insn;
3193 rtx insns;
3194 rtx handler_label = gen_label_rtx ();
3196 /* Don't let jump_optimize delete the handler. */
3197 LABEL_PRESERVE_P (handler_label) = 1;
3199 start_sequence ();
3200 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3201 insns = get_insns ();
3202 end_sequence ();
3203 emit_insns_before (insns, before_insn);
3205 emit_label (handler_label);
3207 return handler_label;
3210 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3211 handler. */
3212 static void
3213 expand_nl_goto_receiver ()
3215 #ifdef HAVE_nonlocal_goto
3216 if (! HAVE_nonlocal_goto)
3217 #endif
3218 /* First adjust our frame pointer to its actual value. It was
3219 previously set to the start of the virtual area corresponding to
3220 the stacked variables when we branched here and now needs to be
3221 adjusted to the actual hardware fp value.
3223 Assignments to virtual registers are converted by
3224 instantiate_virtual_regs into the corresponding assignment
3225 to the underlying register (fp in this case) that makes
3226 the original assignment true.
3227 So the following insn will actually be
3228 decrementing fp by STARTING_FRAME_OFFSET. */
3229 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3231 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3232 if (fixed_regs[ARG_POINTER_REGNUM])
3234 #ifdef ELIMINABLE_REGS
3235 /* If the argument pointer can be eliminated in favor of the
3236 frame pointer, we don't need to restore it. We assume here
3237 that if such an elimination is present, it can always be used.
3238 This is the case on all known machines; if we don't make this
3239 assumption, we do unnecessary saving on many machines. */
3240 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3241 size_t i;
3243 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3244 if (elim_regs[i].from == ARG_POINTER_REGNUM
3245 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3246 break;
3248 if (i == sizeof elim_regs / sizeof elim_regs [0])
3249 #endif
3251 /* Now restore our arg pointer from the address at which it
3252 was saved in our stack frame.
3253 If there hasn't been space allocated for it yet, make
3254 some now. */
3255 if (arg_pointer_save_area == 0)
3256 arg_pointer_save_area
3257 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3258 emit_move_insn (virtual_incoming_args_rtx,
3259 /* We need a pseudo here, or else
3260 instantiate_virtual_regs_1 complains. */
3261 copy_to_reg (arg_pointer_save_area));
3264 #endif
3266 #ifdef HAVE_nonlocal_goto_receiver
3267 if (HAVE_nonlocal_goto_receiver)
3268 emit_insn (gen_nonlocal_goto_receiver ());
3269 #endif
3272 /* Make handlers for nonlocal gotos taking place in the function calls in
3273 block THISBLOCK. */
3275 static void
3276 expand_nl_goto_receivers (thisblock)
3277 struct nesting *thisblock;
3279 tree link;
3280 rtx afterward = gen_label_rtx ();
3281 rtx insns, slot;
3282 rtx label_list;
3283 int any_invalid;
3285 /* Record the handler address in the stack slot for that purpose,
3286 during this block, saving and restoring the outer value. */
3287 if (thisblock->next != 0)
3288 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3290 rtx save_receiver = gen_reg_rtx (Pmode);
3291 emit_move_insn (XEXP (slot, 0), save_receiver);
3293 start_sequence ();
3294 emit_move_insn (save_receiver, XEXP (slot, 0));
3295 insns = get_insns ();
3296 end_sequence ();
3297 emit_insns_before (insns, thisblock->data.block.first_insn);
3300 /* Jump around the handlers; they run only when specially invoked. */
3301 emit_jump (afterward);
3303 /* Make a separate handler for each label. */
3304 link = nonlocal_labels;
3305 slot = nonlocal_goto_handler_slots;
3306 label_list = NULL_RTX;
3307 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3308 /* Skip any labels we shouldn't be able to jump to from here;
3309 we generate one special handler for all of them below which just calls
3310 abort. */
3311 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3313 rtx lab;
3314 lab = expand_nl_handler_label (XEXP (slot, 0),
3315 thisblock->data.block.first_insn);
3316 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3318 expand_nl_goto_receiver ();
3320 /* Jump to the "real" nonlocal label. */
3321 expand_goto (TREE_VALUE (link));
3324 /* A second pass over all nonlocal labels; this time we handle those
3325 we should not be able to jump to at this point. */
3326 link = nonlocal_labels;
3327 slot = nonlocal_goto_handler_slots;
3328 any_invalid = 0;
3329 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3330 if (DECL_TOO_LATE (TREE_VALUE (link)))
3332 rtx lab;
3333 lab = expand_nl_handler_label (XEXP (slot, 0),
3334 thisblock->data.block.first_insn);
3335 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3336 any_invalid = 1;
3339 if (any_invalid)
3341 expand_nl_goto_receiver ();
3342 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3343 VOIDmode, 0);
3344 emit_barrier ();
3347 nonlocal_goto_handler_labels = label_list;
3348 emit_label (afterward);
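/* These receivers serve nonlocal gotos from GNU C nested functions,
   e.g. (an illustrative sketch):

	int f () { void g () { goto lab; }  ...  lab: return 0; }

   Each label still valid at this point gets its own handler above;
   labels marked DECL_TOO_LATE share the abort handler.  */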
3351 /* Generate RTL code to terminate a binding contour.
3352 VARS is the chain of VAR_DECL nodes
3353 for the variables bound in this contour.
3354 MARK_ENDS is nonzero if we should put a note at the beginning
3355 and end of this binding contour.
3357 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3358 (That is true automatically if the contour has a saved stack level.) */
3360 void
3361 expand_end_bindings (vars, mark_ends, dont_jump_in)
3362 tree vars;
3363 int mark_ends;
3364 int dont_jump_in;
3366 register struct nesting *thisblock;
3367 register tree decl;
3369 while (block_stack->data.block.exception_region)
3371 /* Because we don't need or want a new temporary level and
3372 because we didn't create one in expand_eh_region_start,
3373 create a fake one now to avoid removing one in
3374 expand_end_bindings. */
3375 push_temp_slots ();
3377 block_stack->data.block.exception_region = 0;
3379 expand_end_bindings (NULL_TREE, 0, 0);
3382 /* Since expand_eh_region_start does an expand_start_bindings, we
3383 have to first end all the bindings that were created by
3384 expand_eh_region_start. */
3386 thisblock = block_stack;
3388 if (warn_unused)
3389 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3390 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3391 && ! DECL_IN_SYSTEM_HEADER (decl)
3392 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3393 warning_with_decl (decl, "unused variable `%s'");
3395 if (thisblock->exit_label)
3397 do_pending_stack_adjust ();
3398 emit_label (thisblock->exit_label);
3401 /* If necessary, make handlers for nonlocal gotos taking
3402 place in the function calls in this block. */
3403 if (function_call_count != thisblock->data.block.function_call_count
3404 && nonlocal_labels
3405 /* Make handler for outermost block
3406 if there were any nonlocal gotos to this function. */
3407 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3408 /* Make handler for inner block if it has something
3409 special to do when you jump out of it. */
3410 : (thisblock->data.block.cleanups != 0
3411 || thisblock->data.block.stack_level != 0)))
3412 expand_nl_goto_receivers (thisblock);
3414 /* Don't allow jumping into a block that has a stack level.
3415 Cleanups are allowed, though. */
3416 if (dont_jump_in
3417 || thisblock->data.block.stack_level != 0)
3419 struct label_chain *chain;
3421 /* Any labels in this block are no longer valid to go to.
3422 Mark them to cause an error message. */
3423 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3425 DECL_TOO_LATE (chain->label) = 1;
3426 /* If any goto without a fixup came to this label,
3427 that must be an error, because gotos without fixups
3428 come from outside all saved stack-levels. */
3429 if (TREE_ADDRESSABLE (chain->label))
3430 error_with_decl (chain->label,
3431 "label `%s' used before containing binding contour");
3435 /* Restore stack level in effect before the block
3436 (only if variable-size objects allocated). */
3437 /* Perform any cleanups associated with the block. */
3439 if (thisblock->data.block.stack_level != 0
3440 || thisblock->data.block.cleanups != 0)
3442 /* Only clean up here if this point can actually be reached. */
3443 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3445 /* Don't let cleanups affect ({...}) constructs. */
3446 int old_expr_stmts_for_value = expr_stmts_for_value;
3447 rtx old_last_expr_value = last_expr_value;
3448 tree old_last_expr_type = last_expr_type;
3449 expr_stmts_for_value = 0;
3451 /* Do the cleanups. */
3452 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3453 if (reachable)
3454 do_pending_stack_adjust ();
3456 expr_stmts_for_value = old_expr_stmts_for_value;
3457 last_expr_value = old_last_expr_value;
3458 last_expr_type = old_last_expr_type;
3460 /* Restore the stack level. */
3462 if (reachable && thisblock->data.block.stack_level != 0)
3464 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3465 thisblock->data.block.stack_level, NULL_RTX);
3466 if (nonlocal_goto_handler_slots != 0)
3467 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3468 NULL_RTX);
3471 /* Any gotos out of this block must also do these things.
3472 Also report any gotos with fixups that came to labels in this
3473 level. */
3474 fixup_gotos (thisblock,
3475 thisblock->data.block.stack_level,
3476 thisblock->data.block.cleanups,
3477 thisblock->data.block.first_insn,
3478 dont_jump_in);
3481 /* Mark the beginning and end of the scope if requested.
3482 We do this now, after running cleanups on the variables
3483 just going out of scope, so they are in scope for their cleanups. */
3485 if (mark_ends)
3486 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3487 else
3488 /* Get rid of the beginning-mark if we don't make an end-mark. */
3489 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3491 /* If doing stupid register allocation, make sure lives of all
3492 register variables declared here extend through the end of scope.
3494 if (obey_regdecls)
3495 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3497 rtx rtl = DECL_RTL (decl);
3498 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3499 use_variable (rtl);
3502 /* Restore the temporary level of TARGET_EXPRs. */
3503 target_temp_slot_level = thisblock->data.block.target_temp_slot_level;
3505 /* Restore block_stack level for containing block. */
3507 stack_block_stack = thisblock->data.block.innermost_stack_block;
3508 POPSTACK (block_stack);
3510 /* Pop the stack slot nesting and free any slots at this level. */
3511 pop_temp_slots ();
3514 /* Generate RTL for the automatic variable declaration DECL.
3515 (Other kinds of declarations are simply ignored if seen here.) */
3517 void
3518 expand_decl (decl)
3519 register tree decl;
3521 struct nesting *thisblock = block_stack;
3522 tree type;
3524 type = TREE_TYPE (decl);
3526 /* Only automatic variables need any expansion done.
3527 Static and external variables, and external functions,
3528 will be handled by `assemble_variable' (called from finish_decl).
3529 TYPE_DECL and CONST_DECL require nothing.
3530 PARM_DECLs are handled in `assign_parms'. */
3532 if (TREE_CODE (decl) != VAR_DECL)
3533 return;
3534 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3535 return;
3537 /* Create the RTL representation for the variable. */
3539 if (type == error_mark_node)
3540 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3541 else if (DECL_SIZE (decl) == 0)
3542 /* Variable with incomplete type. */
3544 if (DECL_INITIAL (decl) == 0)
3545 /* Error message was already done; now avoid a crash. */
3546 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3547 else
3548 /* An initializer is going to decide the size of this array.
3549 Until we know the size, represent its address with a reg. */
3550 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3551 MEM_SET_IN_STRUCT_P (DECL_RTL (decl), AGGREGATE_TYPE_P (type));
3553 else if (DECL_MODE (decl) != BLKmode
3554 /* If -ffloat-store, don't put explicit float vars
3555 into regs. */
3556 && !(flag_float_store
3557 && TREE_CODE (type) == REAL_TYPE)
3558 && ! TREE_THIS_VOLATILE (decl)
3559 && ! TREE_ADDRESSABLE (decl)
3560 && (DECL_REGISTER (decl) || ! obey_regdecls)
3561 /* if -fcheck-memory-usage, check all variables. */
3562 && ! current_function_check_memory_usage)
3564 /* Automatic variable that can go in a register. */
3565 int unsignedp = TREE_UNSIGNED (type);
3566 enum machine_mode reg_mode
3567 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3569 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3570 mark_user_reg (DECL_RTL (decl));
3572 if (POINTER_TYPE_P (type))
3573 mark_reg_pointer (DECL_RTL (decl),
3574 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3575 / BITS_PER_UNIT));
3578 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3579 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3580 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3581 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3582 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
3584 /* Variable of fixed size that goes on the stack. */
3585 rtx oldaddr = 0;
3586 rtx addr;
3588 /* If we previously made RTL for this decl, it must be an array
3589 whose size was determined by the initializer.
3590 The old address was a register; set that register now
3591 to the proper address. */
3592 if (DECL_RTL (decl) != 0)
3594 if (GET_CODE (DECL_RTL (decl)) != MEM
3595 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3596 abort ();
3597 oldaddr = XEXP (DECL_RTL (decl), 0);
3600 DECL_RTL (decl) = assign_temp (TREE_TYPE (decl), 1, 1, 1);
3601 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3602 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3604 /* Set alignment we actually gave this decl. */
3605 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3606 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3608 if (oldaddr)
3610 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3611 if (addr != oldaddr)
3612 emit_move_insn (oldaddr, addr);
3615 /* If this is a memory ref that contains aggregate components,
3616 mark it as such for cse and loop optimization. */
3617 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3618 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3619 #if 0
3620 /* If this is in memory because of -ffloat-store,
3621 set the volatile bit, to prevent optimizations from
3622 undoing the effects. */
3623 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3624 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3625 #endif
3627 MEM_ALIAS_SET (DECL_RTL (decl)) = get_alias_set (decl);
3629 else
3630 /* Dynamic-size object: must push space on the stack. */
3632 rtx address, size;
3634 /* Record the stack pointer on entry to block, if we have
3635 not already done so. */
3636 if (thisblock->data.block.stack_level == 0)
3638 do_pending_stack_adjust ();
3639 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3640 &thisblock->data.block.stack_level,
3641 thisblock->data.block.first_insn);
3642 stack_block_stack = thisblock;
3645 /* Compute the variable's size, in bytes. */
3646 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3647 DECL_SIZE (decl),
3648 size_int (BITS_PER_UNIT)),
3649 NULL_RTX, VOIDmode, 0);
3650 free_temp_slots ();
3652 /* Allocate space on the stack for the variable. Note that
3653 DECL_ALIGN says how the variable is to be aligned and we
3654 cannot use it to conclude anything about the alignment of
3655 the size. */
3656 address = allocate_dynamic_stack_space (size, NULL_RTX,
3657 TYPE_ALIGN (TREE_TYPE (decl)));
3659 /* Reference the variable indirectly through that rtx. */
3660 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3662 /* If this is a memory ref that contains aggregate components,
3663 mark it as such for cse and loop optimization. */
3664 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3665 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3667 /* Indicate the alignment we actually gave this variable. */
3668 #ifdef STACK_BOUNDARY
3669 DECL_ALIGN (decl) = STACK_BOUNDARY;
3670 #else
3671 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3672 #endif
3675 if (TREE_THIS_VOLATILE (decl))
3676 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3677 #if 0 /* A variable is not necessarily unchanging
3678 just because it is const. RTX_UNCHANGING_P
3679 means no change in the function,
3680 not merely no change in the variable's scope.
3681 It is correct to set RTX_UNCHANGING_P if the variable's scope
3682 is the whole function. There's no convenient way to test that. */
3683 if (TREE_READONLY (decl))
3684 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3685 #endif
3687 /* If doing stupid register allocation, make sure life of any
3688 register variable starts here, at the start of its scope. */
3690 if (obey_regdecls)
3691 use_variable (DECL_RTL (decl));
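/* As a rough illustration (not part of this file's logic), a front
   end expanding a declaration such as `int x = 5;' makes calls along
   these lines.  The decl construction below is schematic and the
   exact sequence varies by front end.  */
#if 0
{
  tree x = build_decl (VAR_DECL, get_identifier ("x"),
                       integer_type_node);
  DECL_INITIAL (x) = build_int_2 (5, 0);
  expand_decl (x);        /* Allocate a pseudo reg or stack slot.  */
  expand_decl_init (x);   /* Emit RTL storing 5 into it (below).  */
}
#endif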
3696 /* Emit code to perform the initialization of a declaration DECL. */
3698 void
3699 expand_decl_init (decl)
3700 tree decl;
3702 int was_used = TREE_USED (decl);
3704 /* If this is a CONST_DECL, we don't have to generate any code, but
3705 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3706 to be set while in the obstack containing the constant. If we don't
3707 do this, we can lose if we have functions nested three deep and the middle
3708 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3709 the innermost function is the first to expand that STRING_CST. */
3710 if (TREE_CODE (decl) == CONST_DECL)
3712 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3713 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3714 EXPAND_INITIALIZER);
3715 return;
3718 if (TREE_STATIC (decl))
3719 return;
3721 /* Compute and store the initial value now. */
3723 if (DECL_INITIAL (decl) == error_mark_node)
3725 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3727 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3728 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3729 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3730 0, 0);
3731 emit_queue ();
3733 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3735 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3736 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3737 emit_queue ();
3740 /* Don't let the initialization count as "using" the variable. */
3741 TREE_USED (decl) = was_used;
3743 /* Free any temporaries we made while initializing the decl. */
3744 preserve_temp_slots (NULL_RTX);
3745 free_temp_slots ();
3748 /* CLEANUP is an expression to be executed at exit from this binding contour;
3749 for example, in C++, it might call the destructor for this variable.
3751 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3752 CLEANUP multiple times, and have the correct semantics. This
3753 happens in exception handling, and for gotos, returns, and breaks
3754 that leave the current scope.
3756 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3757 that is not associated with any particular variable. */
3759 int
3760 expand_decl_cleanup (decl, cleanup)
3761 tree decl, cleanup;
3763 struct nesting *thisblock = block_stack;
3765 /* Error if we are not in any block. */
3766 if (thisblock == 0)
3767 return 0;
3769 /* Record the cleanup if there is one. */
3771 if (cleanup != 0)
3773 tree t;
3774 rtx seq;
3775 tree *cleanups = &thisblock->data.block.cleanups;
3776 int cond_context = conditional_context ();
3778 if (cond_context)
3780 rtx flag = gen_reg_rtx (word_mode);
3781 rtx set_flag_0;
3782 tree cond;
3784 start_sequence ();
3785 emit_move_insn (flag, const0_rtx);
3786 set_flag_0 = get_insns ();
3787 end_sequence ();
3789 thisblock->data.block.last_unconditional_cleanup
3790 = emit_insns_after (set_flag_0,
3791 thisblock->data.block.last_unconditional_cleanup);
3793 emit_move_insn (flag, const1_rtx);
3795 /* All cleanups must be on the function_obstack. */
3796 push_obstacks_nochange ();
3797 resume_temporary_allocation ();
3799 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3800 DECL_RTL (cond) = flag;
3802 /* Conditionalize the cleanup. */
3803 cleanup = build (COND_EXPR, void_type_node,
3804 truthvalue_conversion (cond),
3805 cleanup, integer_zero_node);
3806 cleanup = fold (cleanup);
3808 pop_obstacks ();
3810 cleanups = thisblock->data.block.cleanup_ptr;
3813 /* All cleanups must be on the function_obstack. */
3814 push_obstacks_nochange ();
3815 resume_temporary_allocation ();
3816 cleanup = unsave_expr (cleanup);
3817 pop_obstacks ();
3819 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
3821 if (! cond_context)
3822 /* If this block has a cleanup, it belongs in stack_block_stack. */
3823 stack_block_stack = thisblock;
3825 if (cond_context)
3827 start_sequence ();
3830 /* If this was optimized so that there is no exception region for the
3831 cleanup, then mark the TREE_LIST node, so that we can later tell
3832 if we need to call expand_eh_region_end. */
3833 if (! using_eh_for_cleanups_p
3834 || expand_eh_region_start_tree (decl, cleanup))
3835 TREE_ADDRESSABLE (t) = 1;
3836 /* If that started a new EH region, we're in a new block. */
3837 thisblock = block_stack;
3839 if (cond_context)
3841 seq = get_insns ();
3842 end_sequence ();
3843 if (seq)
3844 thisblock->data.block.last_unconditional_cleanup
3845 = emit_insns_after (seq,
3846 thisblock->data.block.last_unconditional_cleanup);
3848 else
3850 thisblock->data.block.last_unconditional_cleanup
3851 = get_last_insn ();
3852 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3855 return 1;
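/* For example, a C++ front end entering a scope containing a local
   whose type has a destructor might register the destructor call
   roughly as follows; `build_destructor_call' is a hypothetical
   placeholder for whatever the front end uses to build the call.  */
#if 0
{
  tree cleanup = build_destructor_call (decl);	/* hypothetical */
  if (! expand_decl_cleanup (decl, cleanup))
    error ("destructor required outside of any binding contour");
}
#endif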
3858 /* Like expand_decl_cleanup, but suppress generating an exception handler
3859 to perform the cleanup. */
3861 int
3862 expand_decl_cleanup_no_eh (decl, cleanup)
3863 tree decl, cleanup;
3865 int save_eh = using_eh_for_cleanups_p;
3866 int result;
3868 using_eh_for_cleanups_p = 0;
3869 result = expand_decl_cleanup (decl, cleanup);
3870 using_eh_for_cleanups_p = save_eh;
3872 return result;
3875 /* Arrange for the top element of the dynamic cleanup chain to be
3876 popped if we exit the current binding contour. DECL is the
3877 associated declaration, if any, otherwise NULL_TREE. If the
3878 current contour is left via an exception, then __sjthrow will pop
3879 the top element off the dynamic cleanup chain. The code that
3880 avoids doing the action we push into the cleanup chain in the
3881 exceptional case is contained in expand_cleanups.
3883 This routine is only used by expand_eh_region_start, and that is
3884 the only way in which an exception region should be started. This
3885 routine is only used when using the setjmp/longjmp codegen method
3886 for exception handling. */
3888 int
3889 expand_dcc_cleanup (decl)
3890 tree decl;
3892 struct nesting *thisblock = block_stack;
3893 tree cleanup;
3895 /* Error if we are not in any block. */
3896 if (thisblock == 0)
3897 return 0;
3899 /* Record the cleanup for the dynamic cleanup chain. */
3901 /* All cleanups must be on the function_obstack. */
3902 push_obstacks_nochange ();
3903 resume_temporary_allocation ();
3904 cleanup = make_node (POPDCC_EXPR);
3905 pop_obstacks ();
3907 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3908 thisblock->data.block.cleanups
3909 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3911 /* If this block has a cleanup, it belongs in stack_block_stack. */
3912 stack_block_stack = thisblock;
3913 return 1;
3916 /* Arrange for the top element of the dynamic handler chain to be
3917 popped if we exit the current binding contour. DECL is the
3918 associated declaration, if any, otherwise NULL_TREE. If the current
3919 contour is left via an exception, then __sjthrow will pop the top
3920 element off the dynamic handler chain. The code that avoids doing
3921 the action we push into the handler chain in the exceptional case
3922 is contained in expand_cleanups.
3924 This routine is only used by expand_eh_region_start, and that is
3925 the only way in which an exception region should be started. This
3926 routine is only used when using the setjmp/longjmp codegen method
3927 for exception handling. */
3929 int
3930 expand_dhc_cleanup (decl)
3931 tree decl;
3933 struct nesting *thisblock = block_stack;
3934 tree cleanup;
3936 /* Error if we are not in any block. */
3937 if (thisblock == 0)
3938 return 0;
3940 /* Record the cleanup for the dynamic handler chain. */
3942 /* All cleanups must be on the function_obstack. */
3943 push_obstacks_nochange ();
3944 resume_temporary_allocation ();
3945 cleanup = make_node (POPDHC_EXPR);
3946 pop_obstacks ();
3948 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3949 thisblock->data.block.cleanups
3950 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3952 /* If this block has a cleanup, it belongs in stack_block_stack. */
3953 stack_block_stack = thisblock;
3954 return 1;
3957 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3958 DECL_ELTS is the list of elements that belong to DECL's type.
3959 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3961 void
3962 expand_anon_union_decl (decl, cleanup, decl_elts)
3963 tree decl, cleanup, decl_elts;
3965 struct nesting *thisblock = block_stack;
3966 rtx x;
3968 expand_decl (decl);
3969 expand_decl_cleanup (decl, cleanup);
3970 x = DECL_RTL (decl);
3972 while (decl_elts)
3974 tree decl_elt = TREE_VALUE (decl_elts);
3975 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3976 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3978 /* Propagate the union's alignment to the elements. */
3979 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3981 /* If the element has BLKmode and the union doesn't, the union is
3982 aligned such that the element doesn't need to have BLKmode, so
3983 change the element's mode to the appropriate one for its size. */
3984 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3985 DECL_MODE (decl_elt) = mode
3986 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3987 MODE_INT, 1);
3989 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3990 instead create a new MEM rtx with the proper mode. */
3991 if (GET_CODE (x) == MEM)
3993 if (mode == GET_MODE (x))
3994 DECL_RTL (decl_elt) = x;
3995 else
3997 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
3998 MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
3999 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
4002 else if (GET_CODE (x) == REG)
4004 if (mode == GET_MODE (x))
4005 DECL_RTL (decl_elt) = x;
4006 else
4007 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
4009 else
4010 abort ();
4012 /* Record the cleanup if there is one. */
4014 if (cleanup != 0)
4015 thisblock->data.block.cleanups
4016 = temp_tree_cons (decl_elt, cleanup_elt,
4017 thisblock->data.block.cleanups);
4019 decl_elts = TREE_CHAIN (decl_elts);
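/* Concretely: for `union { int i; float f; };' expanded anonymously,
   both members share DECL's storage; a MEM is remapped to the same
   address with SImode and SFmode respectively, while a REG is reused
   directly or through a SUBREG.  Schematic usage, with I_DECL and
   F_DECL standing for pre-built VAR_DECLs for the members:  */
#if 0
expand_anon_union_decl (union_decl, NULL_TREE,
                        tree_cons (NULL_TREE, i_decl,
                                   tree_cons (NULL_TREE, f_decl,
                                              NULL_TREE)));
#endif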
4023 /* Expand a list of cleanups LIST.
4024 Elements may be expressions or may be nested lists.
4026 If DONT_DO is nonnull, then any list-element
4027 whose TREE_PURPOSE matches DONT_DO is omitted.
4028 This is sometimes used to avoid a cleanup associated with
4029 a value that is being returned out of the scope.
4031 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4032 goto and handle protection regions specially in that case.
4034 If REACHABLE, we emit code, otherwise just inform the exception handling
4035 code about this finalization. */
4037 static void
4038 expand_cleanups (list, dont_do, in_fixup, reachable)
4039 tree list;
4040 tree dont_do;
4041 int in_fixup;
4042 int reachable;
4044 tree tail;
4045 for (tail = list; tail; tail = TREE_CHAIN (tail))
4046 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4048 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4049 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4050 else
4052 if (! in_fixup)
4054 tree cleanup = TREE_VALUE (tail);
4056 /* See expand_d{h,c}c_cleanup for why we avoid this. */
4057 if (TREE_CODE (cleanup) != POPDHC_EXPR
4058 && TREE_CODE (cleanup) != POPDCC_EXPR
4059 /* See expand_eh_region_start_tree for this case. */
4060 && ! TREE_ADDRESSABLE (tail))
4062 cleanup = protect_with_terminate (cleanup);
4063 expand_eh_region_end (cleanup);
4067 if (reachable)
4069 /* Cleanups may be run multiple times. For example,
4070 when exiting a binding contour, we expand the
4071 cleanups associated with that contour. When a goto
4072 within that binding contour has a target outside that
4073 contour, it will expand all cleanups from its scope to
4074 the target. Though the cleanups are expanded multiple
4075 times, the control paths are non-overlapping so the
4076 cleanups will not be executed twice. */
4078 /* We may need to protect fixups with rethrow regions. */
4079 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
4081 if (protect)
4082 expand_fixup_region_start ();
4084 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4085 if (protect)
4086 expand_fixup_region_end (TREE_VALUE (tail));
4087 free_temp_slots ();
4093 /* Mark the context we are emitting RTL for as a conditional
4094 context, so that any cleanup actions we register with
4095 expand_decl_init will be properly conditionalized when those
4096 cleanup actions are later performed. Must be called before any
4097 expression (tree) is expanded that is within a conditional context. */
4099 void
4100 start_cleanup_deferral ()
4102 /* block_stack can be NULL if we are inside the parameter list. It is
4103 OK to do nothing, because cleanups aren't possible here. */
4104 if (block_stack)
4105 ++block_stack->data.block.conditional_code;
4108 /* Mark the end of a conditional region of code. Because cleanup
4109 deferrals may be nested, we may still be in a conditional region
4110 after we end the currently deferred cleanups; only after we end all
4111 deferred cleanups are we back in unconditional code. */
4113 void
4114 end_cleanup_deferral ()
4116 /* block_stack can be NULL if we are inside the parameter list. It is
4117 OK to do nothing, because cleanups aren't possible here. */
4118 if (block_stack)
4119 --block_stack->data.block.conditional_code;
4122 /* Move all cleanups from the current block_stack
4123 to the containing block_stack, where they are assumed to
4124 have been created. If anything can cause a temporary to
4125 be created, but not expanded for more than one level of
4126 block_stacks, then this code will have to change. */
4128 void
4129 move_cleanups_up ()
4131 struct nesting *block = block_stack;
4132 struct nesting *outer = block->next;
4134 outer->data.block.cleanups
4135 = chainon (block->data.block.cleanups,
4136 outer->data.block.cleanups);
4137 block->data.block.cleanups = 0;
4140 tree
4141 last_cleanup_this_contour ()
4143 if (block_stack == 0)
4144 return 0;
4146 return block_stack->data.block.cleanups;
4149 /* Return 1 if there are any pending cleanups at this point.
4150 If THIS_CONTOUR is nonzero, check the current contour as well.
4151 Otherwise, look only at the contours that enclose this one. */
4153 int
4154 any_pending_cleanups (this_contour)
4155 int this_contour;
4157 struct nesting *block;
4159 if (block_stack == 0)
4160 return 0;
4162 if (this_contour && block_stack->data.block.cleanups != NULL)
4163 return 1;
4164 if (block_stack->data.block.cleanups == 0
4165 && block_stack->data.block.outer_cleanups == 0)
4166 return 0;
4168 for (block = block_stack->next; block; block = block->next)
4169 if (block->data.block.cleanups != 0)
4170 return 1;
4172 return 0;
4175 /* Enter a case (Pascal) or switch (C) statement.
4176 Push a block onto case_stack and nesting_stack
4177 to accumulate the case-labels that are seen
4178 and to record the labels generated for the statement.
4180 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4181 Otherwise, this construct is transparent for `exit_something'.
4183 EXPR is the index-expression to be dispatched on.
4184 TYPE is its nominal type. We could simply convert EXPR to this type,
4185 but instead we take short cuts. */
4187 void
4188 expand_start_case (exit_flag, expr, type, printname)
4189 int exit_flag;
4190 tree expr;
4191 tree type;
4192 const char *printname;
4194 register struct nesting *thiscase = ALLOC_NESTING ();
4196 /* Make an entry on case_stack for the case we are entering. */
4198 thiscase->next = case_stack;
4199 thiscase->all = nesting_stack;
4200 thiscase->depth = ++nesting_depth;
4201 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4202 thiscase->data.case_stmt.case_list = 0;
4203 thiscase->data.case_stmt.index_expr = expr;
4204 thiscase->data.case_stmt.nominal_type = type;
4205 thiscase->data.case_stmt.default_label = 0;
4206 thiscase->data.case_stmt.num_ranges = 0;
4207 thiscase->data.case_stmt.printname = printname;
4208 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4209 case_stack = thiscase;
4210 nesting_stack = thiscase;
4212 do_pending_stack_adjust ();
4214 /* Make sure case_stmt.start points to something that won't
4215 need any transformation before expand_end_case. */
4216 if (GET_CODE (get_last_insn ()) != NOTE)
4217 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4219 thiscase->data.case_stmt.start = get_last_insn ();
4221 start_cleanup_deferral ();
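/* Taken together, a front end lowers `switch (c) { case 'a': ...
   default: ... }' through this file roughly as follows.  This is a
   schematic sketch: COND, CASE_VALUE and LABEL_DECL stand for trees
   the front end built, and error handling on the pushcase result is
   elided (see pushcase below for the return codes).  */
#if 0
{
  tree duplicate;
  expand_start_case (1, cond, TREE_TYPE (cond), "switch statement");
  /* ... while expanding the body, at each case label: */
  pushcase (case_value, convert, label_decl, &duplicate);
  /* ... and at the closing brace: */
  expand_end_case (cond);
}
#endif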
4225 /* Start a "dummy case statement" within which case labels are invalid
4226 and are not connected to any larger real case statement.
4227 This can be used if you don't want to let a case statement jump
4228 into the middle of certain kinds of constructs. */
4230 void
4231 expand_start_case_dummy ()
4233 register struct nesting *thiscase = ALLOC_NESTING ();
4235 /* Make an entry on case_stack for the dummy. */
4237 thiscase->next = case_stack;
4238 thiscase->all = nesting_stack;
4239 thiscase->depth = ++nesting_depth;
4240 thiscase->exit_label = 0;
4241 thiscase->data.case_stmt.case_list = 0;
4242 thiscase->data.case_stmt.start = 0;
4243 thiscase->data.case_stmt.nominal_type = 0;
4244 thiscase->data.case_stmt.default_label = 0;
4245 thiscase->data.case_stmt.num_ranges = 0;
4246 case_stack = thiscase;
4247 nesting_stack = thiscase;
4248 start_cleanup_deferral ();
4251 /* End a dummy case statement. */
4253 void
4254 expand_end_case_dummy ()
4256 end_cleanup_deferral ();
4257 POPSTACK (case_stack);
4260 /* Return the data type of the index-expression
4261 of the innermost case statement, or null if none. */
4263 tree
4264 case_index_expr_type ()
4266 if (case_stack)
4267 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4268 return 0;
4271 static void
4272 check_seenlabel ()
4274 /* If this is the first label, warn if any insns have been emitted. */
4275 if (case_stack->data.case_stmt.line_number_status >= 0)
4277 rtx insn;
4279 restore_line_number_status
4280 (case_stack->data.case_stmt.line_number_status);
4281 case_stack->data.case_stmt.line_number_status = -1;
4283 for (insn = case_stack->data.case_stmt.start;
4284 insn;
4285 insn = NEXT_INSN (insn))
4287 if (GET_CODE (insn) == CODE_LABEL)
4288 break;
4289 if (GET_CODE (insn) != NOTE
4290 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4293 insn = PREV_INSN (insn);
4294 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4296 /* If insn is zero, then there must have been a syntax error. */
4297 if (insn)
4298 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4299 NOTE_LINE_NUMBER (insn),
4300 "unreachable code at beginning of %s",
4301 case_stack->data.case_stmt.printname);
4302 break;
4308 /* Accumulate one case or default label inside a case or switch statement.
4309 VALUE is the value of the case (a null pointer, for a default label).
4310 The function CONVERTER, when applied to arguments T and V,
4311 converts the value V to the type T.
4313 If not currently inside a case or switch statement, return 1 and do
4314 nothing. The caller will print a language-specific error message.
4315 If VALUE is a duplicate or overlaps, return 2 and do nothing
4316 except store the (first) duplicate node in *DUPLICATE.
4317 If VALUE is out of range, return 3 and do nothing.
4318 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4319 Return 0 on success.
4321 Extended to handle range statements. */
4323 int
4324 pushcase (value, converter, label, duplicate)
4325 register tree value;
4326 tree (*converter) PROTO((tree, tree));
4327 register tree label;
4328 tree *duplicate;
4330 tree index_type;
4331 tree nominal_type;
4333 /* Fail if not inside a real case statement. */
4334 if (! (case_stack && case_stack->data.case_stmt.start))
4335 return 1;
4337 if (stack_block_stack
4338 && stack_block_stack->depth > case_stack->depth)
4339 return 5;
4341 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4342 nominal_type = case_stack->data.case_stmt.nominal_type;
4344 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4345 if (index_type == error_mark_node)
4346 return 0;
4348 /* Convert VALUE to the type in which the comparisons are nominally done. */
4349 if (value != 0)
4350 value = (*converter) (nominal_type, value);
4352 check_seenlabel ();
4354 /* Fail if this value is out of range for the actual type of the index
4355 (which may be narrower than NOMINAL_TYPE). */
4356 if (value != 0 && ! int_fits_type_p (value, index_type))
4357 return 3;
4359 /* Fail if this is a duplicate or overlaps another entry. */
4360 if (value == 0)
4362 if (case_stack->data.case_stmt.default_label != 0)
4364 *duplicate = case_stack->data.case_stmt.default_label;
4365 return 2;
4367 case_stack->data.case_stmt.default_label = label;
4369 else
4370 return add_case_node (value, value, label, duplicate);
4372 expand_label (label);
4373 return 0;
4376 /* Like pushcase but this case applies to all values between VALUE1 and
4377 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4378 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4379 starts at VALUE1 and ends at the highest value of the index type.
4380 If both are NULL, this case applies to all values.
4382 The return value is the same as that of pushcase but there is one
4383 additional error code: 4 means the specified range was empty. */
4385 int
4386 pushcase_range (value1, value2, converter, label, duplicate)
4387 register tree value1, value2;
4388 tree (*converter) PROTO((tree, tree));
4389 register tree label;
4390 tree *duplicate;
4392 tree index_type;
4393 tree nominal_type;
4395 /* Fail if not inside a real case statement. */
4396 if (! (case_stack && case_stack->data.case_stmt.start))
4397 return 1;
4399 if (stack_block_stack
4400 && stack_block_stack->depth > case_stack->depth)
4401 return 5;
4403 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4404 nominal_type = case_stack->data.case_stmt.nominal_type;
4406 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4407 if (index_type == error_mark_node)
4408 return 0;
4410 check_seenlabel ();
4412 /* Convert VALUEs to type in which the comparisons are nominally done
4413 and replace any unspecified value with the corresponding bound. */
4414 if (value1 == 0)
4415 value1 = TYPE_MIN_VALUE (index_type);
4416 if (value2 == 0)
4417 value2 = TYPE_MAX_VALUE (index_type);
4419 /* Fail if the range is empty. Do this before any conversion since
4420 we want to allow out-of-range empty ranges. */
4421 if (value2 && tree_int_cst_lt (value2, value1))
4422 return 4;
4424 value1 = (*converter) (nominal_type, value1);
4426 /* If the max was unbounded, use the max of the nominal_type we are
4427 converting to. Do this after the < check above to suppress false
4428 positives. */
4429 if (!value2)
4430 value2 = TYPE_MAX_VALUE (nominal_type);
4431 value2 = (*converter) (nominal_type, value2);
4433 /* Fail if these values are out of range. */
4434 if (TREE_CONSTANT_OVERFLOW (value1)
4435 || ! int_fits_type_p (value1, index_type))
4436 return 3;
4438 if (TREE_CONSTANT_OVERFLOW (value2)
4439 || ! int_fits_type_p (value2, index_type))
4440 return 3;
4442 return add_case_node (value1, value2, label, duplicate);
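/* A caller is expected to map the return codes of pushcase and
   pushcase_range onto diagnostics, roughly as follows (schematic;
   the wording of real front-end messages differs):  */
#if 0
switch (pushcase_range (low, high, convert, label, &duplicate))
  {
  case 1: error ("case label not within a switch statement"); break;
  case 2: error ("duplicate case value"); break;
  case 3: error ("case value out of range"); break;
  case 4: error ("empty case range"); break;
  case 5: error ("case label within scope of cleanup or variable array"); break;
  }
#endif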
4445 /* Do the actual insertion of a case label for pushcase and pushcase_range
4446 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4447 slowdown for large switch statements. */
4449 static int
4450 add_case_node (low, high, label, duplicate)
4451 tree low, high;
4452 tree label;
4453 tree *duplicate;
4455 struct case_node *p, **q, *r;
4457 q = &case_stack->data.case_stmt.case_list;
4458 p = *q;
4460 while ((r = *q))
4462 p = r;
4464 /* Keep going past elements distinctly greater than HIGH. */
4465 if (tree_int_cst_lt (high, p->low))
4466 q = &p->left;
4468 /* or distinctly less than LOW. */
4469 else if (tree_int_cst_lt (p->high, low))
4470 q = &p->right;
4472 else
4474 /* We have an overlap; this is an error. */
4475 *duplicate = p->code_label;
4476 return 2;
4480 /* Add this label to the chain, and succeed.
4481 Copy LOW, HIGH so they are on temporary rather than momentary
4482 obstack and will thus survive till the end of the case statement. */
4484 r = (struct case_node *) oballoc (sizeof (struct case_node));
4485 r->low = copy_node (low);
4487 /* If the bounds are equal, turn this into the one-value case. */
4489 if (tree_int_cst_equal (low, high))
4490 r->high = r->low;
4491 else
4493 r->high = copy_node (high);
4494 case_stack->data.case_stmt.num_ranges++;
4497 r->code_label = label;
4498 expand_label (label);
4500 *q = r;
4501 r->parent = p;
4502 r->left = 0;
4503 r->right = 0;
4504 r->balance = 0;
4506 while (p)
4508 struct case_node *s;
4510 if (r == p->left)
4512 int b;
4514 if (! (b = p->balance))
4515 /* Growth propagation from left side. */
4516 p->balance = -1;
4517 else if (b < 0)
4519 if (r->balance < 0)
4521 /* R-Rotation */
4522 if ((p->left = s = r->right))
4523 s->parent = p;
4525 r->right = p;
4526 p->balance = 0;
4527 r->balance = 0;
4528 s = p->parent;
4529 p->parent = r;
4531 if ((r->parent = s))
4533 if (s->left == p)
4534 s->left = r;
4535 else
4536 s->right = r;
4538 else
4539 case_stack->data.case_stmt.case_list = r;
4541 else
4542 /* r->balance == +1 */
4544 /* LR-Rotation */
4546 int b2;
4547 struct case_node *t = r->right;
4549 if ((p->left = s = t->right))
4550 s->parent = p;
4552 t->right = p;
4553 if ((r->right = s = t->left))
4554 s->parent = r;
4556 t->left = r;
4557 b = t->balance;
4558 b2 = b < 0;
4559 p->balance = b2;
4560 b2 = -b2 - b;
4561 r->balance = b2;
4562 t->balance = 0;
4563 s = p->parent;
4564 p->parent = t;
4565 r->parent = t;
4567 if ((t->parent = s))
4569 if (s->left == p)
4570 s->left = t;
4571 else
4572 s->right = t;
4574 else
4575 case_stack->data.case_stmt.case_list = t;
4577 break;
4580 else
4582 /* p->balance == +1; growth of left side balances the node. */
4583 p->balance = 0;
4584 break;
4587 else
4588 /* r == p->right */
4590 int b;
4592 if (! (b = p->balance))
4593 /* Growth propagation from right side. */
4594 p->balance++;
4595 else if (b > 0)
4597 if (r->balance > 0)
4599 /* L-Rotation */
4601 if ((p->right = s = r->left))
4602 s->parent = p;
4604 r->left = p;
4605 p->balance = 0;
4606 r->balance = 0;
4607 s = p->parent;
4608 p->parent = r;
4609 if ((r->parent = s))
4611 if (s->left == p)
4612 s->left = r;
4613 else
4614 s->right = r;
4617 else
4618 case_stack->data.case_stmt.case_list = r;
4621 else
4622 /* r->balance == -1 */
4624 /* RL-Rotation */
4625 int b2;
4626 struct case_node *t = r->left;
4628 if ((p->right = s = t->left))
4629 s->parent = p;
4631 t->left = p;
4633 if ((r->left = s = t->right))
4634 s->parent = r;
4636 t->right = r;
4637 b = t->balance;
4638 b2 = b < 0;
4639 r->balance = b2;
4640 b2 = -b2 - b;
4641 p->balance = b2;
4642 t->balance = 0;
4643 s = p->parent;
4644 p->parent = t;
4645 r->parent = t;
4647 if ((t->parent = s))
4649 if (s->left == p)
4650 s->left = t;
4651 else
4652 s->right = t;
4655 else
4656 case_stack->data.case_stmt.case_list = t;
4658 break;
4660 else
4662 /* p->balance == -1; growth of right side balances the node. */
4663 p->balance = 0;
4664 break;
4668 r = p;
4669 p = p->parent;
4672 return 0;
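/* To make the rebalancing concrete: inserting the values 1, 2 and 3
   in that order first tips the balance of node 1 to +1; inserting 3
   then triggers the L-rotation above, turning the chain

       1                      2
        \                    / \
         2        into      1   3
          \
           3

   with all balance factors back to zero.  Overlapping ranges are
   rejected before any rebalancing runs, so the tree always holds
   disjoint [low, high] intervals.  */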
4676 /* Returns the number of possible values of TYPE.
4677 Returns -1 if the number is unknown or variable.
4678 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4679 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4680 do not increase monotonically (there may be duplicates);
4681 to 1 if the values increase monotonically, but not always by 1;
4682 otherwise sets it to 0. */
4684 HOST_WIDE_INT
4685 all_cases_count (type, spareness)
4686 tree type;
4687 int *spareness;
4689 HOST_WIDE_INT count;
4690 *spareness = 0;
4692 switch (TREE_CODE (type))
4694 tree t;
4695 case BOOLEAN_TYPE:
4696 count = 2;
4697 break;
4698 case CHAR_TYPE:
4699 count = 1 << BITS_PER_UNIT;
4700 break;
4701 default:
4702 case INTEGER_TYPE:
4703 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4704 || TYPE_MAX_VALUE (type) == NULL
4705 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4706 return -1;
4707 else
4709 /* count
4710 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4711 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4712 but with overflow checking. */
4713 tree mint = TYPE_MIN_VALUE (type);
4714 tree maxt = TYPE_MAX_VALUE (type);
4715 HOST_WIDE_INT lo, hi;
4716 neg_double (TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4717 &lo, &hi);
4718 add_double (TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4719 lo, hi, &lo, &hi);
4720 add_double (lo, hi, 1, 0, &lo, &hi);
4721 if (hi != 0 || lo < 0)
4722 return -2;
4723 count = lo;
4725 break;
4726 case ENUMERAL_TYPE:
4727 count = 0;
4728 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4730 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4731 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4732 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4733 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4734 *spareness = 1;
4735 count++;
4737 if (*spareness == 1)
4739 tree prev = TREE_VALUE (TYPE_VALUES (type));
4740 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4742 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4744 *spareness = 2;
4745 break;
4747 prev = TREE_VALUE (t);
4752 return count;
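/* For example: a BOOLEAN_TYPE yields 2; CHAR_TYPE yields 256 when
   BITS_PER_UNIT is 8; `enum {A, B, C}' yields 3 with *SPARENESS 0;
   and `enum {A = 0, B = 4}' yields 2 with *SPARENESS 1, since the
   values do not increase by exactly 1.  */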
4756 #define BITARRAY_TEST(ARRAY, INDEX) \
4757 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4758 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4759 #define BITARRAY_SET(ARRAY, INDEX) \
4760 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4761 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
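/* A minimal illustration of the two macros, assuming 8-bit chars:
   index 11 lands in byte 1, bit 3.  MARK_HANDLED is a hypothetical
   stand-in for whatever the caller does with a seen case.  */
#if 0
{
  unsigned char seen[4] = { 0, 0, 0, 0 };
  BITARRAY_SET (seen, 11);      /* seen[1] is now 0x08.  */
  if (BITARRAY_TEST (seen, 11)) /* Nonzero after the set above.  */
    MARK_HANDLED (11);
}
#endif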
4763 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT)
4764 corresponding to the case values we have seen, assuming the case
4765 expression has the given TYPE.
4766 SPARSENESS is as determined by all_cases_count.
4768 The time needed is proportional to COUNT, unless
4769 SPARSENESS is 2, in which case quadratic time is needed. */
4771 void
4772 mark_seen_cases (type, cases_seen, count, sparseness)
4773 tree type;
4774 unsigned char *cases_seen;
4775 long count;
4776 int sparseness;
4778 tree next_node_to_try = NULL_TREE;
4779 long next_node_offset = 0;
4781 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4782 tree val = make_node (INTEGER_CST);
4783 TREE_TYPE (val) = type;
4784 if (! root)
4785 ; /* Do nothing */
4786 else if (sparseness == 2)
4788 tree t;
4789 HOST_WIDE_INT xlo;
4791 /* This less efficient loop is only needed to handle
4792 duplicate case values (multiple enum constants
4793 with the same value). */
4794 TREE_TYPE (val) = TREE_TYPE (root->low);
4795 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4796 t = TREE_CHAIN (t), xlo++)
4798 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4799 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4800 n = root;
4803 /* Keep going past elements distinctly greater than VAL. */
4804 if (tree_int_cst_lt (val, n->low))
4805 n = n->left;
4807 /* or distinctly less than VAL. */
4808 else if (tree_int_cst_lt (n->high, val))
4809 n = n->right;
4811 else
4813 /* We have found a matching range. */
4814 BITARRAY_SET (cases_seen, xlo);
4815 break;
4818 while (n);
4821 else
4823 if (root->left)
4824 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4825 for (n = root; n; n = n->right)
4827 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4828 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4829 while ( ! tree_int_cst_lt (n->high, val))
4831 /* Calculate (into xlo) the "offset" of the integer (val).
4832 The element with lowest value has offset 0, the next smallest
4833 element has offset 1, etc. */
4835 HOST_WIDE_INT xlo, xhi;
4836 tree t;
4837 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4839 /* The TYPE_VALUES will be in increasing order, so
4840 start searching where we last ended. */
4841 t = next_node_to_try;
4842 xlo = next_node_offset;
4843 xhi = 0;
4844 for (;;)
4846 if (t == NULL_TREE)
4848 t = TYPE_VALUES (type);
4849 xlo = 0;
4851 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4853 next_node_to_try = TREE_CHAIN (t);
4854 next_node_offset = xlo + 1;
4855 break;
4857 xlo++;
4858 t = TREE_CHAIN (t);
4859 if (t == next_node_to_try)
4861 xlo = -1;
4862 break;
4866 else
4868 t = TYPE_MIN_VALUE (type);
4869 if (t)
4870 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4871 &xlo, &xhi);
4872 else
4873 xlo = xhi = 0;
4874 add_double (xlo, xhi,
4875 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4876 &xlo, &xhi);
4879 if (xhi == 0 && xlo >= 0 && xlo < count)
4880 BITARRAY_SET (cases_seen, xlo);
4881 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4882 1, 0,
4883 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4889 /* Called when the index of a switch statement is an enumerated type
4890 and there is no default label.
4892 Checks that all enumeration literals are covered by the case
4893 expressions of a switch. Also, warn if there are any extra
4894 switch cases that are *not* elements of the enumerated type.
4896 If all enumeration literals were covered by the case expressions,
4897 turn one of the expressions into the default expression since it should
4898 not be possible to fall through such a switch. */
4900 void
4901 check_for_full_enumeration_handling (type)
4902 tree type;
4904 register struct case_node *n;
4905 register tree chain;
4906 #if 0 /* variable used by 'if 0'ed code below. */
4907 register struct case_node **l;
4908 int all_values = 1;
4909 #endif
4911 /* True iff the selector type is a numbered set mode. */
4912 int sparseness = 0;
4914 /* The number of possible selector values. */
4915 HOST_WIDE_INT size;
4917 /* For each possible selector value, a one iff it has been matched
4918 by a case value alternative. */
4919 unsigned char *cases_seen;
4921 /* The allocated size of cases_seen, in chars. */
4922 long bytes_needed;
4924 if (! warn_switch)
4925 return;
4927 size = all_cases_count (type, &sparseness);
4928 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
4930 if (size > 0 && size < 600000
4931 /* We deliberately use malloc here - not xmalloc. */
4932 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
4934 long i;
4935 tree v = TYPE_VALUES (type);
4936 bzero (cases_seen, bytes_needed);
4938 /* The time complexity of this code is normally O(N), where
4939 N is the number of members in the enumerated type.
4940 However, if type is an ENUMERAL_TYPE whose values do not
4941 increase monotonically, O(N*log(N)) time may be needed. */
4943 mark_seen_cases (type, cases_seen, size, sparseness);
4945 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4947 if (BITARRAY_TEST (cases_seen, i) == 0)
4948 warning ("enumeration value `%s' not handled in switch",
4949 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
4952 free (cases_seen);
4955 /* Now we go the other way around; we warn if there are case
4956 expressions that don't correspond to enumerators. This can
4957 occur since C and C++ don't enforce type-checking of
4958 assignments to enumeration variables. */
4960 if (case_stack->data.case_stmt.case_list
4961 && case_stack->data.case_stmt.case_list->left)
4962 case_stack->data.case_stmt.case_list
4963 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
4964 if (warn_switch)
4965 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4967 for (chain = TYPE_VALUES (type);
4968 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4969 chain = TREE_CHAIN (chain))
4972 if (!chain)
4974 if (TYPE_NAME (type) == 0)
4975 warning ("case value `%ld' not in enumerated type",
4976 (long) TREE_INT_CST_LOW (n->low));
4977 else
4978 warning ("case value `%ld' not in enumerated type `%s'",
4979 (long) TREE_INT_CST_LOW (n->low),
4980 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4981 == IDENTIFIER_NODE)
4982 ? TYPE_NAME (type)
4983 : DECL_NAME (TYPE_NAME (type))));
4985 if (!tree_int_cst_equal (n->low, n->high))
4987 for (chain = TYPE_VALUES (type);
4988 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4989 chain = TREE_CHAIN (chain))
4992 if (!chain)
4994 if (TYPE_NAME (type) == 0)
4995 warning ("case value `%ld' not in enumerated type",
4996 (long) TREE_INT_CST_LOW (n->high));
4997 else
4998 warning ("case value `%ld' not in enumerated type `%s'",
4999 (long) TREE_INT_CST_LOW (n->high),
5000 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5001 == IDENTIFIER_NODE)
5002 ? TYPE_NAME (type)
5003 : DECL_NAME (TYPE_NAME (type))));
5008 #if 0
5009 /* ??? This optimization is disabled because it causes valid programs to
5010 fail. ANSI C does not guarantee that an expression with enum type
5011 will have a value that is the same as one of the enumeration literals. */
5013 /* If all values were found as case labels, make one of them the default
5014 label. Thus, this switch will never fall through. We arbitrarily pick
5015 the last one to make the default since this is likely the most
5016 efficient choice. */
5018 if (all_values)
5020 for (l = &case_stack->data.case_stmt.case_list;
5021 (*l)->right != 0;
5022 l = &(*l)->right)
5025 case_stack->data.case_stmt.default_label = (*l)->code_label;
5026 *l = 0;
5028 #endif /* 0 */
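/* For reference, the situation being diagnosed (under -Wswitch):
   given `enum color { RED, GREEN, BLUE };', a switch on a value of
   that type which handles only RED draws one warning each for GREEN
   and BLUE, and an added `case 42:' in the same switch draws
   "case value `42' not in enumerated type `color'".  */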
5032 /* Terminate a case (Pascal) or switch (C) statement
5033 in which ORIG_INDEX is the expression to be tested.
5034 Generate the code to test it and jump to the right place. */
5036 void
5037 expand_end_case (orig_index)
5038 tree orig_index;
5040 tree minval = NULL_TREE, maxval = NULL_TREE, range, orig_minval;
5041 rtx default_label = 0;
5042 register struct case_node *n;
5043 unsigned int count;
5044 rtx index;
5045 rtx table_label;
5046 int ncases;
5047 rtx *labelvec;
5048 register int i;
5049 rtx before_case;
5050 register struct nesting *thiscase = case_stack;
5051 tree index_expr, index_type;
5052 int unsignedp;
5054 table_label = gen_label_rtx ();
5055 index_expr = thiscase->data.case_stmt.index_expr;
5056 index_type = TREE_TYPE (index_expr);
5057 unsignedp = TREE_UNSIGNED (index_type);
5059 do_pending_stack_adjust ();
5061 /* This might get a spurious warning in the presence of a syntax error;
5062 it could be fixed by moving the call to check_seenlabel after the
5063 check for error_mark_node, and copying the code of check_seenlabel that
5064 deals with case_stack->data.case_stmt.line_number_status /
5065 restore_line_number_status in front of the call to end_cleanup_deferral;
5066 however, this might miss some useful warnings in the presence of
5067 non-syntax errors. */
5068 check_seenlabel ();
5070 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5071 if (index_type != error_mark_node)
5073 /* If switch expression was an enumerated type, check that all
5074 enumeration literals are covered by the cases.
5075 No sense trying this if there's a default case, however. */
5077 if (!thiscase->data.case_stmt.default_label
5078 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5079 && TREE_CODE (index_expr) != INTEGER_CST)
5080 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5082 /* If we don't have a default-label, create one here,
5083 after the body of the switch. */
5084 if (thiscase->data.case_stmt.default_label == 0)
5086 thiscase->data.case_stmt.default_label
5087 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5088 expand_label (thiscase->data.case_stmt.default_label);
5090 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5092 before_case = get_last_insn ();
5094 if (thiscase->data.case_stmt.case_list
5095 && thiscase->data.case_stmt.case_list->left)
5096 thiscase->data.case_stmt.case_list
5097 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5099 /* Simplify the case-list before we count it. */
5100 group_case_nodes (thiscase->data.case_stmt.case_list);
5102 /* Get upper and lower bounds of case values.
5103 Also convert all the case values to the index expr's data type. */
5105 count = 0;
5106 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5108 /* Check low and high label values are integers. */
5109 if (TREE_CODE (n->low) != INTEGER_CST)
5110 abort ();
5111 if (TREE_CODE (n->high) != INTEGER_CST)
5112 abort ();
5114 n->low = convert (index_type, n->low);
5115 n->high = convert (index_type, n->high);
5117 /* Count the elements and track the largest and smallest
5118 of them (treating them as signed even if they are not). */
5119 if (count++ == 0)
5121 minval = n->low;
5122 maxval = n->high;
5124 else
5126 if (INT_CST_LT (n->low, minval))
5127 minval = n->low;
5128 if (INT_CST_LT (maxval, n->high))
5129 maxval = n->high;
5131 /* A range counts double, since it requires two compares. */
5132 if (! tree_int_cst_equal (n->low, n->high))
5133 count++;
5136 orig_minval = minval;
5138 /* Compute span of values. */
5139 if (count != 0)
5140 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5142 end_cleanup_deferral ();
5144 if (count == 0)
5146 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5147 emit_queue ();
5148 emit_jump (default_label);
5151 /* If range of values is much bigger than number of values,
5152 make a sequence of conditional branches instead of a dispatch.
5153 If the switch-index is a constant, do it this way
5154 because we can optimize it. */
5156 #ifndef CASE_VALUES_THRESHOLD
5157 #ifdef HAVE_casesi
5158 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5159 #else
5160 /* If the machine does not have a case insn that compares the
5161 bounds, this means extra overhead for dispatch tables
5162 which raises the threshold for using them. */
5163 #define CASE_VALUES_THRESHOLD 5
5164 #endif /* HAVE_casesi */
5165 #endif /* CASE_VALUES_THRESHOLD */
5167 else if (TREE_INT_CST_HIGH (range) != 0
5168 || count < (unsigned int) CASE_VALUES_THRESHOLD
5169 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
5170 > 10 * count)
5171 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5172 || flag_pic
5173 #endif
5174 || TREE_CODE (index_expr) == INTEGER_CST
5175 /* These will reduce to a constant. */
5176 || (TREE_CODE (index_expr) == CALL_EXPR
5177 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5178 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5179 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5180 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5181 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5183 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5185 /* If the index is a short or char for which we do not have
5186 an insn to handle comparisons directly, convert it to
5187 a full integer now, rather than letting each comparison
5188 generate the conversion. */
5190 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5191 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
5192 == CODE_FOR_nothing))
5194 enum machine_mode wider_mode;
5195 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5196 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5197 if (cmp_optab->handlers[(int) wider_mode].insn_code
5198 != CODE_FOR_nothing)
5200 index = convert_to_mode (wider_mode, index, unsignedp);
5201 break;
5205 emit_queue ();
5206 do_pending_stack_adjust ();
5208 index = protect_from_queue (index, 0);
5209 if (GET_CODE (index) == MEM)
5210 index = copy_to_reg (index);
5211 if (GET_CODE (index) == CONST_INT
5212 || TREE_CODE (index_expr) == INTEGER_CST)
5214 /* Make a tree node with the proper constant value
5215 if we don't already have one. */
5216 if (TREE_CODE (index_expr) != INTEGER_CST)
5218 index_expr
5219 = build_int_2 (INTVAL (index),
5220 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5221 index_expr = convert (index_type, index_expr);
5224 /* For constant index expressions we need only
5225 issue an unconditional branch to the appropriate
5226 target code. The job of removing any unreachable
5227 code is left to the optimization phase if the
5228 "-O" option is specified. */
5229 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5230 if (! tree_int_cst_lt (index_expr, n->low)
5231 && ! tree_int_cst_lt (n->high, index_expr))
5232 break;
5234 if (n)
5235 emit_jump (label_rtx (n->code_label));
5236 else
5237 emit_jump (default_label);
5239 else
5241 /* If the index expression is not constant we generate
5242 a binary decision tree to select the appropriate
5243 target code. This is done as follows:
5245 The list of cases is rearranged into a binary tree,
5246 nearly optimal assuming equal probability for each case.
5248 The tree is transformed into RTL, eliminating
5249 redundant test conditions at the same time.
5251 If program flow could reach the end of the
5252 decision tree, an unconditional jump to the
5253 default code is emitted. */
5255 use_cost_table
5256 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5257 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5258 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5259 NULL_PTR);
5260 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5261 default_label, index_type);
5262 emit_jump_if_reachable (default_label);
5265 else
5267 int win = 0;
5268 #ifdef HAVE_casesi
5269 if (HAVE_casesi)
5271 enum machine_mode index_mode = SImode;
5272 int index_bits = GET_MODE_BITSIZE (index_mode);
5273 rtx op1, op2;
5274 enum machine_mode op_mode;
5276 /* Convert the index to SImode. */
5277 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5278 > GET_MODE_BITSIZE (index_mode))
5280 enum machine_mode omode = TYPE_MODE (index_type);
5281 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5283 /* We must handle the endpoints in the original mode. */
5284 index_expr = build (MINUS_EXPR, index_type,
5285 index_expr, minval);
5286 minval = integer_zero_node;
5287 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5288 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
5289 omode, 1, 0, default_label);
5290 /* Now we can safely truncate. */
5291 index = convert_to_mode (index_mode, index, 0);
5293 else
5295 if (TYPE_MODE (index_type) != index_mode)
5297 index_expr = convert (type_for_size (index_bits, 0),
5298 index_expr);
5299 index_type = TREE_TYPE (index_expr);
5302 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5304 emit_queue ();
5305 index = protect_from_queue (index, 0);
5306 do_pending_stack_adjust ();
5308 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
5309 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
5310 (index, op_mode))
5311 index = copy_to_mode_reg (op_mode, index);
5313 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5315 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
5316 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
5317 (op1, op_mode))
5318 op1 = copy_to_mode_reg (op_mode, op1);
5320 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5322 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
5323 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
5324 (op2, op_mode))
5325 op2 = copy_to_mode_reg (op_mode, op2);
5327 emit_jump_insn (gen_casesi (index, op1, op2,
5328 table_label, default_label));
5329 win = 1;
5331 #endif
5332 #ifdef HAVE_tablejump
5333 if (! win && HAVE_tablejump)
5335 index_expr = convert (thiscase->data.case_stmt.nominal_type,
5336 fold (build (MINUS_EXPR, index_type,
5337 index_expr, minval)));
5338 index_type = TREE_TYPE (index_expr);
5339 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5340 emit_queue ();
5341 index = protect_from_queue (index, 0);
5342 do_pending_stack_adjust ();
5344 do_tablejump (index, TYPE_MODE (index_type),
5345 expand_expr (range, NULL_RTX, VOIDmode, 0),
5346 table_label, default_label);
5347 win = 1;
5349 #endif
5350 if (! win)
5351 abort ();
5353 /* Get table of labels to jump to, in order of case index. */
5355 ncases = TREE_INT_CST_LOW (range) + 1;
5356 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5357 bzero ((char *) labelvec, ncases * sizeof (rtx));
5359 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5361 register HOST_WIDE_INT i
5362 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5364 while (1)
5366 labelvec[i]
5367 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5368 if (i + TREE_INT_CST_LOW (orig_minval)
5369 == TREE_INT_CST_LOW (n->high))
5370 break;
5371 i++;
5375 /* Fill in the gaps with the default. */
5376 for (i = 0; i < ncases; i++)
5377 if (labelvec[i] == 0)
5378 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5380 /* Output the table. */
5381 emit_label (table_label);
5383 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5384 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5385 gen_rtx_LABEL_REF (Pmode, table_label),
5386 gen_rtvec_v (ncases, labelvec),
5387 const0_rtx, const0_rtx, 0));
5388 else
5389 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5390 gen_rtvec_v (ncases, labelvec)));
5392 /* If the case insn drops through the table,
5393 after the table we must jump to the default-label.
5394 Otherwise record no drop-through after the table. */
5395 #ifdef CASE_DROPS_THROUGH
5396 emit_jump (default_label);
5397 #else
5398 emit_barrier ();
5399 #endif
5402 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5403 reorder_insns (before_case, get_last_insn (),
5404 thiscase->data.case_stmt.start);
5406 else
5407 end_cleanup_deferral ();
5409 if (thiscase->exit_label)
5410 emit_label (thiscase->exit_label);
5412 POPSTACK (case_stack);
5414 free_temp_slots ();
5417 /* Convert the tree NODE into a list linked by the right field, with the left
5418 field zeroed. RIGHT is used for recursion; it is a list to be placed
5419 rightmost in the resulting list. */
5421 static struct case_node *
5422 case_tree2list (node, right)
5423 struct case_node *node, *right;
5425 struct case_node *left;
5427 if (node->right)
5428 right = case_tree2list (node->right, right);
5430 node->right = right;
5431 if ((left = node->left))
5433 node->left = 0;
5434 return case_tree2list (left, node);
5437 return node;
5440 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5442 static void
5443 do_jump_if_equal (op1, op2, label, unsignedp)
5444 rtx op1, op2, label;
5445 int unsignedp;
5447 if (GET_CODE (op1) == CONST_INT
5448 && GET_CODE (op2) == CONST_INT)
5450 if (INTVAL (op1) == INTVAL (op2))
5451 emit_jump (label);
5453 else
5455 enum machine_mode mode = GET_MODE (op1);
5456 if (mode == VOIDmode)
5457 mode = GET_MODE (op2);
5458 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
5459 0, label);
5463 /* Not all case values are encountered equally. This function
5464 uses a heuristic to weight case labels, in cases where that
5465 looks like a reasonable thing to do.
5467 Right now, all we try to guess is text, and we establish the
5468 following weights:
5470 chars above space: 16
5471 digits: 16
5472 default: 12
5473 space, punct: 8
5474 tab: 4
5475 newline: 2
5476 other "\" chars: 1
5477 remaining chars: 0
5479 If we find any cases in the switch that are not either -1 or in the range
5480 of valid ASCII characters, or are control characters other than those
5481 commonly used with "\", don't treat this switch as scanning text.
5483 Return 1 if these nodes are suitable for cost estimation, otherwise
5484 return 0. */
5486 static int
5487 estimate_case_costs (node)
5488 case_node_ptr node;
5490 tree min_ascii = build_int_2 (-1, -1);
5491 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5492 case_node_ptr n;
5493 int i;
5495 /* If we haven't already made the cost table, make it now. Note that the
5496 lower bound of the table is -1, not zero. */
5498 if (cost_table == NULL)
5500 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5501 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5503 for (i = 0; i < 128; i++)
5505 if (ISALNUM (i))
5506 cost_table[i] = 16;
5507 else if (ISPUNCT (i))
5508 cost_table[i] = 8;
5509 else if (ISCNTRL (i))
5510 cost_table[i] = -1;
5513 cost_table[' '] = 8;
5514 cost_table['\t'] = 4;
5515 cost_table['\0'] = 4;
5516 cost_table['\n'] = 2;
5517 cost_table['\f'] = 1;
5518 cost_table['\v'] = 1;
5519 cost_table['\b'] = 1;
5522 /* See if all the case expressions look like text. It is text if the
5523 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5524 as signed arithmetic since we don't want to ever access cost_table with a
5525 value less than -1. Also check that none of the constants in a range
5526 are strange control characters. */
5528 for (n = node; n; n = n->right)
5530 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5531 return 0;
5533 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5534 if (cost_table[i] < 0)
5535 return 0;
5538 /* All interesting values are within the range of interesting
5539 ASCII characters. */
5540 return 1;
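
/* Worked example (editorial): for labels 'a', '0', ' ' and '\n' the table
   above yields weights 16, 16, 8 and 2, so balance_case_nodes will place
   'a' and '0' nearer the root of the decision tree.  A label such as 1 (a
   control character not given a weight) makes this function return 0 and
   the caller falls back to uniform weights, roughly as in this sketch of
   the call site in expand_end_case:  */
#if 0
  use_cost_table
    = estimate_case_costs (thiscase->data.case_stmt.case_list);
#endif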

/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      rtx lb2;
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
             /* Do they jump to the same place?  */
             && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
                 || (lb != 0 && lb2 != 0
                     && simplejump_p (lb)
                     && simplejump_p (lb2)
                     && rtx_equal_p (SET_SRC (PATTERN (lb)),
                                     SET_SRC (PATTERN (lb2)))))
             /* Are their ranges consecutive?  */
             && tree_int_cst_equal (np->low,
                                    fold (build (PLUS_EXPR,
                                                 TREE_TYPE (node->high),
                                                 node->high,
                                                 integer_one_node)))
             /* An overflow is not consecutive.  */
             && tree_int_cst_lt (node->high,
                                 fold (build (PLUS_EXPR,
                                              TREE_TYPE (node->high),
                                              node->high,
                                              integer_one_node))))
        {
          node->high = np->high;
        }
      /* NP is the first node after NODE which can't be grouped with it.
         Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}
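
/* Editorial illustration: given

        case 1:  case 2:  case 3:  do_something ();  break;

   all three nodes branch to the same insn and carry consecutive values,
   so the loop above merges them into one node covering 1..3.  Nodes that
   jump to different code, or whose values leave a gap, are not grouped.  */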

/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on this branch.  Also count the
         ranges.  */

      while (np)
        {
          if (!tree_int_cst_equal (np->low, np->high))
            {
              ranges++;
              if (use_cost_table)
                cost += cost_table[TREE_INT_CST_LOW (np->high)];
            }

          if (use_cost_table)
            cost += cost_table[TREE_INT_CST_LOW (np->low)];

          i++;
          np = np->right;
        }

      if (i > 2)
        {
          /* Split this list if it is long enough for that to help.  */
          npp = head;
          left = *npp;
          if (use_cost_table)
            {
              /* Find the place in the list that bisects the list's total
                 cost.  Here I gets half the total cost.  */
              int n_moved = 0;
              i = (cost + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
                  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                  n_moved += 1;
                }
              if (n_moved == 0)
                {
                  /* Leave this branch lopsided, but optimize left-hand
                     side and fill in `parent' fields for right-hand side.  */
                  np = *head;
                  np->parent = parent;
                  balance_case_nodes (&np->left, np);
                  for (; np->right; np = np->right)
                    np->right->parent = np;
                  return;
                }
            }
          /* If there are just three nodes, split at the middle one.  */
          else if (i == 3)
            npp = &(*npp)->right;
          else
            {
              /* Find the place in the list that bisects the list's total cost,
                 where ranges count as 2.
                 Here I gets half the total cost.  */
              i = (i + ranges + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i--;
                  i--;
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                }
            }
          *head = np = *npp;
          *npp = 0;
          np->parent = parent;
          np->left = left;

          /* Optimize each of the two split parts.  */
          balance_case_nodes (&np->left, np);
          balance_case_nodes (&np->right, np);
        }
      else
        {
          /* Else leave this branch as one level,
             but fill in `parent' fields.  */
          np = *head;
          np->parent = parent;
          for (; np->right; np = np->right)
            np->right->parent = np;
        }
    }
}
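
/* Editorial illustration: with uniform weights (use_cost_table == 0) the
   list 1 -> 2 -> 3 -> 4 -> 5 is split at the node 3; each half is a chain
   of two, which is already "one level", so the result is

              3
             / \
            1   4
             \   \
              2   5

   and any case value is reached in O(log n) comparisons.  */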

/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */

static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
                               node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}
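
/* Editorial example: suppose a parent node tested 49 and this node's low
   bound is 50.  Control reaches this node's code only after the parent's
   comparisons sent every value <= 49 elsewhere, so low_minus_one == 49
   matches pnode->high and the `index < 50' test may be omitted.  */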

/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If there is no upper bound, obviously no test is needed.  */

  if (TYPE_MAX_VALUE (index_type) == NULL)
    return 1;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
                               node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}

/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
          && node_has_high_bound (node, index_type));
}
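
/* Editorial example: if one parent tested 49 (just below) and another
   tested 61 (just above), a node covering 50..60 is fully bounded, and
   emit_case_nodes below can jump to its code label unconditionally.  */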

/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}

/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinate's associated
   code for out of bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */

static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_fn ();
  enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
         this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
                        label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
        {
          /* This node has children on both sides.
             Dispatch to one side or the other
             by comparing the index value with this node's value.
             If one subtree is bounded, check that one first,
             so we can avoid real branches in the tree.  */

          if (node_is_bounded (node->right, index_type))
            {
              emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
                                                           VOIDmode, 0),
                                       GT, NULL_RTX, mode, unsignedp, 0,
                                       label_rtx (node->right->code_label));
              emit_case_nodes (index, node->left, default_label, index_type);
            }

          else if (node_is_bounded (node->left, index_type))
            {
              emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
                                                           VOIDmode, 0),
                                       LT, NULL_RTX, mode, unsignedp, 0,
                                       label_rtx (node->left->code_label));
              emit_case_nodes (index, node->right, default_label, index_type);
            }

          else
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              tree test_label
                = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

              /* See if the value is on the right.  */
              emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
                                                           VOIDmode, 0),
                                       GT, NULL_RTX, mode, unsignedp, 0,
                                       label_rtx (test_label));

              /* Value must be on the left.
                 Handle the left-hand subtree.  */
              emit_case_nodes (index, node->left, default_label, index_type);
              /* If left-hand subtree does nothing,
                 go to default.  */
              emit_jump_if_reachable (default_label);

              /* Code branches here for the right-hand subtree.  */
              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Here we have a right child but no left so we issue a conditional
             branch to default and process the right child.

             Omit the conditional branch to default if it would avoid only one
             right child; it costs too much space to save so little time.  */

          if (node->right->right || node->right->left
              || !tree_int_cst_equal (node->right->low, node->right->high))
            {
              if (!node_has_low_bound (node, index_type))
                {
                  emit_cmp_and_jump_insns (index, expand_expr (node->high,
                                                               NULL_RTX,
                                                               VOIDmode, 0),
                                           LT, NULL_RTX, mode, unsignedp, 0,
                                           default_label);
                }

              emit_case_nodes (index, node->right, default_label, index_type);
            }
          else
            /* We cannot process node->right normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->right explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->right->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->right->code_label), unsignedp);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
         of the condition here, but they didn't work
         and I don't understand what the idea was.  -- rms.  */
          /* If our "most probable entry" is less probable
             than the default label, emit a jump to
             the default label using condition codes
             already lying around.  With no right branch,
             a branch-greater-than will get us to the default
             label correctly.  */
          if (use_cost_table
              && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
            ;
#endif /* 0 */
          if (node->left->left || node->left->right
              || !tree_int_cst_equal (node->left->low, node->left->high))
            {
              if (!node_has_high_bound (node, index_type))
                {
                  emit_cmp_and_jump_insns (index, expand_expr (node->high,
                                                               NULL_RTX,
                                                               VOIDmode, 0),
                                           GT, NULL_RTX, mode, unsignedp, 0,
                                           default_label);
                }

              emit_case_nodes (index, node->left, default_label, index_type);
            }
          else
            /* We cannot process node->left normally
               since we haven't ruled out the numbers greater than
               this node's value.  So handle node->left explicitly.  */
            do_jump_if_equal (index,
                              expand_expr (node->left->low, NULL_RTX,
                                           VOIDmode, 0),
                              label_rtx (node->left->code_label), unsignedp);
        }
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
         value, except that we do not start by testing whether this node
         is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
        {
          /* Node has subtrees on both sides.
             If the right-hand subtree is bounded,
             test for it first, since we can go straight there.
             Otherwise, we need to make a branch in the control structure,
             then handle the two subtrees.  */
          tree test_label = 0;

          if (node_is_bounded (node->right, index_type))
            /* Right hand node is fully bounded so we can eliminate any
               testing and branch directly to the target code.  */
            emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
                                                         VOIDmode, 0),
                                     GT, NULL_RTX, mode, unsignedp, 0,
                                     label_rtx (node->right->code_label));
          else
            {
              /* Right hand node requires testing.
                 Branch to a label where we will handle it later.  */

              test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
              emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
                                                           VOIDmode, 0),
                                       GT, NULL_RTX, mode, unsignedp, 0,
                                       label_rtx (test_label));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
                                                       VOIDmode, 0),
                                   GE, NULL_RTX, mode, unsignedp, 0,
                                   label_rtx (node->code_label));

          /* Handle the left-hand subtree.  */
          emit_case_nodes (index, node->left, default_label, index_type);

          /* If right node had to be handled later, do that now.  */

          if (test_label)
            {
              /* If the left-hand subtree fell through,
                 don't let it fall into the right-hand subtree.  */
              emit_jump_if_reachable (default_label);

              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Deal with values to the left of this node,
             if they are possible.  */
          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
                                                           VOIDmode, 0),
                                       LT, NULL_RTX, mode, unsignedp, 0,
                                       default_label);
            }

          /* Value belongs to this node or to the right-hand subtree.  */

          emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
                                                       VOIDmode, 0),
                                   LE, NULL_RTX, mode, unsignedp, 0,
                                   label_rtx (node->code_label));

          emit_case_nodes (index, node->right, default_label, index_type);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Deal with values to the right of this node,
             if they are possible.  */
          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
                                                           VOIDmode, 0),
                                       GT, NULL_RTX, mode, unsignedp, 0,
                                       default_label);
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
                                                       VOIDmode, 0),
                                   GE, NULL_RTX, mode, unsignedp, 0,
                                   label_rtx (node->code_label));

          emit_case_nodes (index, node->left, default_label, index_type);
        }

      else
        {
          /* Node has no children so we check low and high bounds to remove
             redundant tests.  Only one of the bounds can exist,
             since otherwise this node is bounded--a case tested already.  */

          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
                                                           VOIDmode, 0),
                                       GT, NULL_RTX, mode, unsignedp, 0,
                                       default_label);
            }

          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
                                                           VOIDmode, 0),
                                       LT, NULL_RTX, mode, unsignedp, 0,
                                       default_label);
            }

          emit_jump (label_rtx (node->code_label));
        }
    }
}
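
/* Worked example (editorial): for `switch (i) { case 1: case 3: case 5: }'
   balanced as the tree 3 / {1, 5}, with no bounds provable, the code
   emitted above is roughly (pseudo code, labels hypothetical):

        if (i == 3) goto L3;    -- single-valued pivot
        if (i > 3) goto T;      -- distinguish the two sides
        if (i == 1) goto L1;    -- left subtree
        goto dflt;
   T:   if (i == 5) goto L5;    -- right subtree
        ...                     -- falls into the default handling

   When node_is_bounded holds for a subtree, the corresponding comparison
   collapses into a direct branch to that subtree's code label.  */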

/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.

   Allocated by the call to identify_blocks, then released after the call
   to reorder_blocks in the function unroll_block_trees.  */

static tree *block_vector;

void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());

  /* Release any memory allocated by identify_blocks.  */
  if (block_vector)
    free (block_vector);
}
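
/* Editorial sketch of the intended calling sequence (the caller lives in
   the loop unroller; the outline below is an assumption, not a quote):  */
#if 0
  find_loop_tree_blocks ();     /* snapshot the BLOCK tree for the function */
  /* ... copy the loop body, duplicating NOTE_INSN_BLOCK notes ... */
  unroll_block_trees ();        /* rebuild the BLOCK tree to match insns */
#endif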