1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
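/* As an illustrative sketch (not code that appears verbatim anywhere),
   a C statement `if (cond) stmt;' is expanded by calls of roughly
   this shape:

       expand_start_cond (cond, 0);
       ... expand STMT ...
       expand_end_cond ();  */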
36 #include "config.h"
37 #include "system.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "tm_p.h"
42 #include "flags.h"
43 #include "except.h"
44 #include "function.h"
45 #include "insn-config.h"
46 #include "expr.h"
47 #include "libfuncs.h"
48 #include "hard-reg-set.h"
49 #include "obstack.h"
50 #include "loop.h"
51 #include "recog.h"
52 #include "machmode.h"
53 #include "toplev.h"
54 #include "output.h"
55 #include "ggc.h"
57 #define obstack_chunk_alloc xmalloc
58 #define obstack_chunk_free free
59 struct obstack stmt_obstack;
61 /* Assume that case vectors are not pc-relative. */
62 #ifndef CASE_VECTOR_PC_RELATIVE
63 #define CASE_VECTOR_PC_RELATIVE 0
64 #endif
66 /* Functions and data structures for expanding case statements. */
68 /* Case label structure, used to hold info on labels within case
69 statements. We handle "range" labels; for a single-value label
70 as in C, the high and low limits are the same.
72 An AVL tree of case nodes is initially created, and later transformed
73 to a list linked via the RIGHT fields in the nodes. Nodes with
74 higher case values are later in the list.
76 Switch statements can be output in one of two forms. A branch table
77 is used if there are more than a few labels and the labels are dense
78 within the range between the smallest and largest case value. If a
79 branch table is used, no further manipulations are done with the case
80 node chain.
82 The alternative to the use of a branch table is to generate a series
83 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
84 and PARENT fields to hold a binary tree. Initially the tree is
85 totally unbalanced, with everything on the right. We balance the tree
86 with nodes on the left having lower case values than the parent
87 and nodes on the right having higher values. We then output the tree
88 in order. */
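/* Illustrative example: for `switch (c) { case 1: case 2: case 3: ... }'
   each label yields one case_node with LOW == HIGH, and since the
   values 1..3 are dense a dispatch table would normally be emitted;
   sparse values such as 1, 100, 10000 would instead be balanced into
   a tree of compare-and-jump insns.  */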
90 struct case_node
92 struct case_node *left; /* Left son in binary tree */
93 struct case_node *right; /* Right son in binary tree; also node chain */
94 struct case_node *parent; /* Parent of node in binary tree */
95 tree low; /* Lowest index value for this label */
96 tree high; /* Highest index value for this label */
97 tree code_label; /* Label to jump to when node matches */
98 int balance;
101 typedef struct case_node case_node;
102 typedef struct case_node *case_node_ptr;
104 /* These are used by estimate_case_costs and balance_case_nodes. */
106 /* This must be a signed type, and non-ANSI compilers lack signed char. */
107 static short cost_table_[129];
108 static int use_cost_table;
109 static int cost_table_initialized;
111 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
112 is unsigned. */
113 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT)((I) + 1)]
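/* Thus COST_TABLE (-1) refers to cost_table_[0] and COST_TABLE (127)
   to cost_table_[128]: case values -1 .. 127 map onto subscripts
   0 .. 128 with no negative index.  */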
115 /* Stack of control and binding constructs we are currently inside.
117 These constructs begin when you call `expand_start_WHATEVER'
118 and end when you call `expand_end_WHATEVER'. This stack records
119 info about how the construct began that tells the end-function
120 what to do. It also may provide information about the construct
121 to alter the behavior of other constructs within the body.
122 For example, they may affect the behavior of C `break' and `continue'.
124 Each construct gets one `struct nesting' object.
125 All of these objects are chained through the `all' field.
126 `nesting_stack' points to the first object (innermost construct).
127 The position of an entry on `nesting_stack' is in its `depth' field.
129 Each type of construct has its own individual stack.
130 For example, loops have `loop_stack'. Each object points to the
131 next object of the same type through the `next' field.
133 Some constructs are visible to `break' exit-statements and others
134 are not. Which constructs are visible depends on the language.
135 Therefore, the data structure allows each construct to be visible
136 or not, according to the args given when the construct is started.
137 The construct is visible if the `exit_label' field is non-null.
138 In that case, the value should be a CODE_LABEL rtx. */
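/* For instance, while expanding `while (a) { if (b) break; }' the
   `break' jumps to the exit_label of the innermost construct whose
   exit_label is non-null, here the loop's.  (Illustrative note.)  */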
140 struct nesting
142 struct nesting *all;
143 struct nesting *next;
144 int depth;
145 rtx exit_label;
146 union
148 /* For conds (if-then and if-then-else statements). */
149 struct
151 /* Label for the end of the if construct.
152 There is none if EXITFLAG was not set
153 and no `else' has been seen yet. */
154 rtx endif_label;
155 /* Label for the end of this alternative.
156 This may be the end of the if or the next else/elseif. */
157 rtx next_label;
158 } cond;
159 /* For loops. */
160 struct
162 /* Label at the top of the loop; place to loop back to. */
163 rtx start_label;
164 /* Label at the end of the whole construct. */
165 rtx end_label;
166 /* Label before a jump that branches to the end of the whole
167 construct. This is where destructors go if any. */
168 rtx alt_end_label;
169 /* Label for `continue' statement to jump to;
170 this is in front of the stepper of the loop. */
171 rtx continue_label;
172 } loop;
173 /* For variable binding contours. */
174 struct
176 /* Sequence number of this binding contour within the function,
177 in order of entry. */
178 int block_start_count;
179 /* Nonzero => value to restore stack to on exit. */
180 rtx stack_level;
181 /* The NOTE that starts this contour.
182 Used by expand_goto to check whether the destination
183 is within each contour or not. */
184 rtx first_insn;
185 /* Innermost containing binding contour that has a stack level. */
186 struct nesting *innermost_stack_block;
187 /* List of cleanups to be run on exit from this contour.
188 This is a list of expressions to be evaluated.
189 The TREE_PURPOSE of each link is the ..._DECL node
190 which the cleanup pertains to. */
191 tree cleanups;
192 /* List of cleanup-lists of blocks containing this block,
193 as they were at the locus where this block appears.
194 There is an element for each containing block,
195 ordered innermost containing block first.
196 The tail of this list can be 0,
197 if all remaining elements would be empty lists.
198 The element's TREE_VALUE is the cleanup-list of that block,
199 which may be null. */
200 tree outer_cleanups;
201 /* Chain of labels defined inside this binding contour.
202 For contours that have stack levels or cleanups. */
203 struct label_chain *label_chain;
204 /* Number of function calls seen, as of start of this block. */
205 int n_function_calls;
206 /* Nonzero if this is associated with an EH region. */
207 int exception_region;
208 /* The saved target_temp_slot_level from our outer block.
209 We may reset target_temp_slot_level to be the level of
210 this block, if that is done, target_temp_slot_level
211 reverts to the saved target_temp_slot_level at the very
212 end of the block. */
213 int block_target_temp_slot_level;
214 /* True if we are currently emitting insns in an area of
215 output code that is controlled by a conditional
216 expression. This is used by the cleanup handling code to
217 generate conditional cleanup actions. */
218 int conditional_code;
219 /* A place to move the start of the exception region for any
220 of the conditional cleanups; it must be at the end of or after
221 the start of the last unconditional cleanup, and before any
222 conditional branch points. */
223 rtx last_unconditional_cleanup;
224 /* When in a conditional context, this is the specific
225 cleanup list associated with last_unconditional_cleanup,
226 where we place the conditionalized cleanups. */
227 tree *cleanup_ptr;
228 } block;
229 /* For switch (C) or case (Pascal) statements,
230 and also for dummies (see `expand_start_case_dummy'). */
231 struct
233 /* The insn after which the case dispatch should finally
234 be emitted. Zero for a dummy. */
235 rtx start;
236 /* A list of case labels; it is first built as an AVL tree.
237 During expand_end_case, this is converted to a list, and may be
238 rearranged into a nearly balanced binary tree. */
239 struct case_node *case_list;
240 /* Label to jump to if no case matches. */
241 tree default_label;
242 /* The expression to be dispatched on. */
243 tree index_expr;
244 /* Type that INDEX_EXPR should be converted to. */
245 tree nominal_type;
246 /* Name of this kind of statement, for warnings. */
247 const char *printname;
248 /* Used to save no_line_numbers till we see the first case label.
249 We set this to -1 when we see the first case label in this
250 case statement. */
251 int line_number_status;
252 } case_stmt;
253 } data;
256 /* Allocate and return a new `struct nesting'. */
258 #define ALLOC_NESTING() \
259 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
261 /* Pop the nesting stack element by element until we pop off
262 the element which is at the top of STACK.
263 Update all the other stacks, popping off elements from them
264 as we pop them from nesting_stack. */
266 #define POPSTACK(STACK) \
267 do { struct nesting *target = STACK; \
268 struct nesting *this; \
269 do { this = nesting_stack; \
270 if (loop_stack == this) \
271 loop_stack = loop_stack->next; \
272 if (cond_stack == this) \
273 cond_stack = cond_stack->next; \
274 if (block_stack == this) \
275 block_stack = block_stack->next; \
276 if (stack_block_stack == this) \
277 stack_block_stack = stack_block_stack->next; \
278 if (case_stack == this) \
279 case_stack = case_stack->next; \
280 nesting_depth = nesting_stack->depth - 1; \
281 nesting_stack = this->all; \
282 obstack_free (&stmt_obstack, this); } \
283 while (this != target); } while (0)
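/* E.g. POPSTACK (loop_stack) frees every nesting object down to and
   including the innermost loop, keeping each per-construct stack
   consistent along the way.  (Usage note.)  */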
285 /* In some cases it is impossible to generate code for a forward goto
286 until the label definition is seen. This happens when it may be necessary
287 for the goto to reset the stack pointer: we don't yet know how to do that.
288 So expand_goto puts an entry on this fixup list.
289 Each time a binding contour that resets the stack is exited,
290 we check each fixup.
291 If the target label has now been defined, we can insert the proper code. */
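/* Illustrative case: in `{ char buf[n]; ... goto lab; } ... lab: ;'
   the goto must restore the stack level of the contour it exits, but
   when the goto is expanded `lab' is not yet defined, so a goto_fixup
   is queued until the label appears.  */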
293 struct goto_fixup
295 /* Points to following fixup. */
296 struct goto_fixup *next;
297 /* Points to the insn before the jump insn.
298 If more code must be inserted, it goes after this insn. */
299 rtx before_jump;
300 /* The LABEL_DECL that this jump is jumping to, or 0
301 for break, continue or return. */
302 tree target;
303 /* The BLOCK for the place where this goto was found. */
304 tree context;
305 /* The CODE_LABEL rtx that this is jumping to. */
306 rtx target_rtl;
307 /* Number of binding contours started in current function
308 before the label reference. */
309 int block_start_count;
310 /* The outermost stack level that should be restored for this jump.
311 Each time a binding contour that resets the stack is exited,
312 if the target label is *not* yet defined, this slot is updated. */
313 rtx stack_level;
314 /* List of lists of cleanup expressions to be run by this goto.
315 There is one element for each block that this goto is within.
316 The tail of this list can be 0,
317 if all remaining elements would be empty.
318 The TREE_VALUE contains the cleanup list of that block as of the
319 time this goto was seen.
320 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
321 tree cleanup_list_list;
324 /* Within any binding contour that must restore a stack level,
325 all labels are recorded with a chain of these structures. */
327 struct label_chain
329 /* Points to the following label_chain. */
330 struct label_chain *next;
331 tree label;
334 struct stmt_status
336 /* Chain of all pending binding contours. */
337 struct nesting *x_block_stack;
339 /* If any new stacks are added here, add them to POPSTACKS too. */
341 /* Chain of all pending binding contours that restore stack levels
342 or have cleanups. */
343 struct nesting *x_stack_block_stack;
345 /* Chain of all pending conditional statements. */
346 struct nesting *x_cond_stack;
348 /* Chain of all pending loops. */
349 struct nesting *x_loop_stack;
351 /* Chain of all pending case or switch statements. */
352 struct nesting *x_case_stack;
354 /* Separate chain including all of the above,
355 chained through the `all' field. */
356 struct nesting *x_nesting_stack;
358 /* Number of entries on nesting_stack now. */
359 int x_nesting_depth;
361 /* Number of binding contours started so far in this function. */
362 int x_block_start_count;
364 /* Each time we expand an expression-statement,
365 record the expr's type and its RTL value here. */
366 tree x_last_expr_type;
367 rtx x_last_expr_value;
369 /* Nonzero if within a ({...}) grouping, in which case we must
370 always compute a value for each expr-stmt in case it is the last one. */
371 int x_expr_stmts_for_value;
373 /* Filename and line number of last line-number note,
374 whether we actually emitted it or not. */
375 const char *x_emit_filename;
376 int x_emit_lineno;
378 struct goto_fixup *x_goto_fixup_chain;
381 #define block_stack (cfun->stmt->x_block_stack)
382 #define stack_block_stack (cfun->stmt->x_stack_block_stack)
383 #define cond_stack (cfun->stmt->x_cond_stack)
384 #define loop_stack (cfun->stmt->x_loop_stack)
385 #define case_stack (cfun->stmt->x_case_stack)
386 #define nesting_stack (cfun->stmt->x_nesting_stack)
387 #define nesting_depth (cfun->stmt->x_nesting_depth)
388 #define current_block_start_count (cfun->stmt->x_block_start_count)
389 #define last_expr_type (cfun->stmt->x_last_expr_type)
390 #define last_expr_value (cfun->stmt->x_last_expr_value)
391 #define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
392 #define emit_filename (cfun->stmt->x_emit_filename)
393 #define emit_lineno (cfun->stmt->x_emit_lineno)
394 #define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
396 /* Non-zero if we are using EH to handle cleanups. */
397 static int using_eh_for_cleanups_p = 0;
399 static int n_occurrences PARAMS ((int, const char *));
400 static void expand_goto_internal PARAMS ((tree, rtx, rtx));
401 static int expand_fixup PARAMS ((tree, rtx, rtx));
402 static rtx expand_nl_handler_label PARAMS ((rtx, rtx));
403 static void expand_nl_goto_receiver PARAMS ((void));
404 static void expand_nl_goto_receivers PARAMS ((struct nesting *));
405 static void fixup_gotos PARAMS ((struct nesting *, rtx, tree,
406 rtx, int));
407 static bool check_operand_nalternatives PARAMS ((tree, tree));
408 static bool check_unique_operand_names PARAMS ((tree, tree));
409 static tree resolve_operand_names PARAMS ((tree, tree, tree,
410 const char **));
411 static char *resolve_operand_name_1 PARAMS ((char *, tree, tree));
412 static void expand_null_return_1 PARAMS ((rtx));
413 static void expand_value_return PARAMS ((rtx));
414 static int tail_recursion_args PARAMS ((tree, tree));
415 static void expand_cleanups PARAMS ((tree, tree, int, int));
416 static void check_seenlabel PARAMS ((void));
417 static void do_jump_if_equal PARAMS ((rtx, rtx, rtx, int));
418 static int estimate_case_costs PARAMS ((case_node_ptr));
419 static void group_case_nodes PARAMS ((case_node_ptr));
420 static void balance_case_nodes PARAMS ((case_node_ptr *,
421 case_node_ptr));
422 static int node_has_low_bound PARAMS ((case_node_ptr, tree));
423 static int node_has_high_bound PARAMS ((case_node_ptr, tree));
424 static int node_is_bounded PARAMS ((case_node_ptr, tree));
425 static void emit_jump_if_reachable PARAMS ((rtx));
426 static void emit_case_nodes PARAMS ((rtx, case_node_ptr, rtx, tree));
427 static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
428 static void mark_cond_nesting PARAMS ((struct nesting *));
429 static void mark_loop_nesting PARAMS ((struct nesting *));
430 static void mark_block_nesting PARAMS ((struct nesting *));
431 static void mark_case_nesting PARAMS ((struct nesting *));
432 static void mark_case_node PARAMS ((struct case_node *));
433 static void mark_goto_fixup PARAMS ((struct goto_fixup *));
434 static void free_case_nodes PARAMS ((case_node_ptr));
436 void
437 using_eh_for_cleanups ()
439 using_eh_for_cleanups_p = 1;
442 /* Mark N (known to be a cond-nesting) for GC. */
444 static void
445 mark_cond_nesting (n)
446 struct nesting *n;
448 while (n)
450 ggc_mark_rtx (n->exit_label);
451 ggc_mark_rtx (n->data.cond.endif_label);
452 ggc_mark_rtx (n->data.cond.next_label);
454 n = n->next;
458 /* Mark N (known to be a loop-nesting) for GC. */
460 static void
461 mark_loop_nesting (n)
462 struct nesting *n;
465 while (n)
467 ggc_mark_rtx (n->exit_label);
468 ggc_mark_rtx (n->data.loop.start_label);
469 ggc_mark_rtx (n->data.loop.end_label);
470 ggc_mark_rtx (n->data.loop.alt_end_label);
471 ggc_mark_rtx (n->data.loop.continue_label);
473 n = n->next;
477 /* Mark N (known to be a block-nesting) for GC. */
479 static void
480 mark_block_nesting (n)
481 struct nesting *n;
483 while (n)
485 struct label_chain *l;
487 ggc_mark_rtx (n->exit_label);
488 ggc_mark_rtx (n->data.block.stack_level);
489 ggc_mark_rtx (n->data.block.first_insn);
490 ggc_mark_tree (n->data.block.cleanups);
491 ggc_mark_tree (n->data.block.outer_cleanups);
493 for (l = n->data.block.label_chain; l != NULL; l = l->next)
495 ggc_mark (l);
496 ggc_mark_tree (l->label);
499 ggc_mark_rtx (n->data.block.last_unconditional_cleanup);
501 /* ??? cleanup_ptr never points outside the stack, does it? */
503 n = n->next;
507 /* Mark N (known to be a case-nesting) for GC. */
509 static void
510 mark_case_nesting (n)
511 struct nesting *n;
513 while (n)
515 ggc_mark_rtx (n->exit_label);
516 ggc_mark_rtx (n->data.case_stmt.start);
518 ggc_mark_tree (n->data.case_stmt.default_label);
519 ggc_mark_tree (n->data.case_stmt.index_expr);
520 ggc_mark_tree (n->data.case_stmt.nominal_type);
522 mark_case_node (n->data.case_stmt.case_list);
523 n = n->next;
527 /* Mark C for GC. */
529 static void
530 mark_case_node (c)
531 struct case_node *c;
533 if (c != 0)
535 ggc_mark_tree (c->low);
536 ggc_mark_tree (c->high);
537 ggc_mark_tree (c->code_label);
539 mark_case_node (c->right);
540 mark_case_node (c->left);
544 /* Mark G for GC. */
546 static void
547 mark_goto_fixup (g)
548 struct goto_fixup *g;
550 while (g)
552 ggc_mark (g);
553 ggc_mark_rtx (g->before_jump);
554 ggc_mark_tree (g->target);
555 ggc_mark_tree (g->context);
556 ggc_mark_rtx (g->target_rtl);
557 ggc_mark_rtx (g->stack_level);
558 ggc_mark_tree (g->cleanup_list_list);
560 g = g->next;
564 /* Clear out all parts of the state in F that can safely be discarded
565 after the function has been compiled, to let garbage collection
566 reclaim the memory. */
568 void
569 free_stmt_status (f)
570 struct function *f;
572 /* We're about to free the function obstack. If we hold pointers to
573 things allocated there, then we'll try to mark them when we do
574 GC. So, we clear them out here explicitly. */
575 if (f->stmt)
576 free (f->stmt);
577 f->stmt = NULL;
580 /* Mark P for GC. */
582 void
583 mark_stmt_status (p)
584 struct stmt_status *p;
586 if (p == 0)
587 return;
589 mark_block_nesting (p->x_block_stack);
590 mark_cond_nesting (p->x_cond_stack);
591 mark_loop_nesting (p->x_loop_stack);
592 mark_case_nesting (p->x_case_stack);
594 ggc_mark_tree (p->x_last_expr_type);
595 /* last_expr_value is only valid if last_expr_type is nonzero. */
596 if (p->x_last_expr_type)
597 ggc_mark_rtx (p->x_last_expr_value);
599 mark_goto_fixup (p->x_goto_fixup_chain);
602 void
603 init_stmt ()
605 gcc_obstack_init (&stmt_obstack);
608 void
609 init_stmt_for_function ()
611 cfun->stmt = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));
613 /* We are not currently within any block, conditional, loop or case. */
614 block_stack = 0;
615 stack_block_stack = 0;
616 loop_stack = 0;
617 case_stack = 0;
618 cond_stack = 0;
619 nesting_stack = 0;
620 nesting_depth = 0;
622 current_block_start_count = 0;
624 /* No gotos have been expanded yet. */
625 goto_fixup_chain = 0;
627 /* We are not processing a ({...}) grouping. */
628 expr_stmts_for_value = 0;
629 last_expr_type = 0;
630 last_expr_value = NULL_RTX;
633 /* Return nonzero if anything is pushed on the loop, condition, or case
634 stack. */
635 int
636 in_control_zone_p ()
638 return cond_stack || loop_stack || case_stack;
641 /* Record the current file and line. Called from emit_line_note. */
642 void
643 set_file_and_line_for_stmt (file, line)
644 const char *file;
645 int line;
647 /* If we're outputting an inline function, and we add a line note,
648 there may be no CFUN->STMT information. So, there's no need to
649 update it. */
650 if (cfun->stmt)
652 emit_filename = file;
653 emit_lineno = line;
657 /* Emit a no-op instruction. */
659 void
660 emit_nop ()
662 rtx last_insn;
664 last_insn = get_last_insn ();
665 if (!optimize
666 && (GET_CODE (last_insn) == CODE_LABEL
667 || (GET_CODE (last_insn) == NOTE
668 && prev_real_insn (last_insn) == 0)))
669 emit_insn (gen_nop ());
672 /* Return the rtx-label that corresponds to a LABEL_DECL,
673 creating it if necessary. */
675 rtx
676 label_rtx (label)
677 tree label;
679 if (TREE_CODE (label) != LABEL_DECL)
680 abort ();
682 if (!DECL_RTL_SET_P (label))
683 SET_DECL_RTL (label, gen_label_rtx ());
685 return DECL_RTL (label);
689 /* Add an unconditional jump to LABEL as the next sequential instruction. */
691 void
692 emit_jump (label)
693 rtx label;
695 do_pending_stack_adjust ();
696 emit_jump_insn (gen_jump (label));
697 emit_barrier ();
700 /* Emit code to jump to the address
701 specified by the pointer expression EXP. */
703 void
704 expand_computed_goto (exp)
705 tree exp;
707 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
709 #ifdef POINTERS_EXTEND_UNSIGNED
710 if (GET_MODE (x) != Pmode)
711 x = convert_memory_address (Pmode, x);
712 #endif
714 emit_queue ();
715 /* Be sure the function is executable. */
716 if (current_function_check_memory_usage)
717 emit_library_call (chkr_check_exec_libfunc, LCT_CONST_MAKE_BLOCK,
718 VOIDmode, 1, x, ptr_mode);
720 do_pending_stack_adjust ();
721 emit_indirect_jump (x);
723 current_function_has_computed_jump = 1;
726 /* Handle goto statements and the labels that they can go to. */
728 /* Specify the location in the RTL code of a label LABEL,
729 which is a LABEL_DECL tree node.
731 This is used for the kind of label that the user can jump to with a
732 goto statement, and for alternatives of a switch or case statement.
733 RTL labels generated for loops and conditionals don't go through here;
734 they are generated directly at the RTL level, by other functions below.
736 Note that this has nothing to do with defining label *names*.
737 Languages vary in how they do that and what that even means. */
739 void
740 expand_label (label)
741 tree label;
743 struct label_chain *p;
745 do_pending_stack_adjust ();
746 emit_label (label_rtx (label));
747 if (DECL_NAME (label))
748 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
750 if (stack_block_stack != 0)
752 p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
753 p->next = stack_block_stack->data.block.label_chain;
754 stack_block_stack->data.block.label_chain = p;
755 p->label = label;
759 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
760 from nested functions. */
762 void
763 declare_nonlocal_label (label)
764 tree label;
766 rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
768 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
769 LABEL_PRESERVE_P (label_rtx (label)) = 1;
770 if (nonlocal_goto_handler_slots == 0)
772 emit_stack_save (SAVE_NONLOCAL,
773 &nonlocal_goto_stack_level,
774 PREV_INSN (tail_recursion_reentry));
776 nonlocal_goto_handler_slots
777 = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
780 /* Generate RTL code for a `goto' statement with target label LABEL.
781 LABEL should be a LABEL_DECL tree node that was or will later be
782 defined with `expand_label'. */
784 void
785 expand_goto (label)
786 tree label;
788 tree context;
790 /* Check for a nonlocal goto to a containing function. */
791 context = decl_function_context (label);
792 if (context != 0 && context != current_function_decl)
794 struct function *p = find_function_data (context);
795 rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
796 rtx handler_slot, static_chain, save_area, insn;
797 tree link;
799 /* Find the corresponding handler slot for this label. */
800 handler_slot = p->x_nonlocal_goto_handler_slots;
801 for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
802 link = TREE_CHAIN (link))
803 handler_slot = XEXP (handler_slot, 1);
804 handler_slot = XEXP (handler_slot, 0);
806 p->has_nonlocal_label = 1;
807 current_function_has_nonlocal_goto = 1;
808 LABEL_REF_NONLOCAL_P (label_ref) = 1;
810 /* Copy the rtl for the slots so that they won't be shared in
811 case the virtual stack vars register gets instantiated differently
812 in the parent than in the child. */
814 static_chain = copy_to_reg (lookup_static_chain (label));
816 /* Get addr of containing function's current nonlocal goto handler,
817 which will do any cleanups and then jump to the label. */
818 handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
819 virtual_stack_vars_rtx,
820 static_chain));
822 /* Get addr of containing function's nonlocal save area. */
823 save_area = p->x_nonlocal_goto_stack_level;
824 if (save_area)
825 save_area = replace_rtx (copy_rtx (save_area),
826 virtual_stack_vars_rtx, static_chain);
828 #if HAVE_nonlocal_goto
829 if (HAVE_nonlocal_goto)
830 emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
831 save_area, label_ref));
832 else
833 #endif
835 /* Restore frame pointer for containing function.
836 This sets the actual hard register used for the frame pointer
837 to the location of the function's incoming static chain info.
838 The non-local goto handler will then adjust it to contain the
839 proper value and reload the argument pointer, if needed. */
840 emit_move_insn (hard_frame_pointer_rtx, static_chain);
841 emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);
843 /* USE of hard_frame_pointer_rtx added for consistency;
844 not clear if really needed. */
845 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
846 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
847 emit_indirect_jump (handler_slot);
850 /* Search backwards to the jump insn and mark it as a
851 non-local goto. */
852 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
854 if (GET_CODE (insn) == JUMP_INSN)
856 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
857 const0_rtx, REG_NOTES (insn));
858 break;
860 else if (GET_CODE (insn) == CALL_INSN)
861 break;
864 else
865 expand_goto_internal (label, label_rtx (label), NULL_RTX);
868 /* Generate RTL code for a `goto' statement with target label BODY.
869 LABEL should be a CODE_LABEL.
870 LAST_INSN, if non-0, is the rtx we should consider as the last
871 insn emitted (for the purposes of cleaning up a return). */
873 static void
874 expand_goto_internal (body, label, last_insn)
875 tree body;
876 rtx label;
877 rtx last_insn;
879 struct nesting *block;
880 rtx stack_level = 0;
882 if (GET_CODE (label) != CODE_LABEL)
883 abort ();
885 /* If label has already been defined, we can tell now
886 whether and how we must alter the stack level. */
888 if (PREV_INSN (label) != 0)
890 /* Find the innermost pending block that contains the label.
891 (Check containment by comparing insn-uids.)
892 Then restore the outermost stack level within that block,
893 and do cleanups of all blocks contained in it. */
894 for (block = block_stack; block; block = block->next)
896 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
897 break;
898 if (block->data.block.stack_level != 0)
899 stack_level = block->data.block.stack_level;
900 /* Execute the cleanups for blocks we are exiting. */
901 if (block->data.block.cleanups != 0)
903 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
904 do_pending_stack_adjust ();
908 if (stack_level)
910 /* Ensure stack adjust isn't done by emit_jump, as this
911 would clobber the stack pointer. This one should be
912 deleted as dead by flow. */
913 clear_pending_stack_adjust ();
914 do_pending_stack_adjust ();
916 /* Don't do this adjust if it's to the end label and this function
917 is to return with a depressed stack pointer. */
918 if (label == return_label
919 && (((TREE_CODE (TREE_TYPE (current_function_decl))
920 == FUNCTION_TYPE)
921 && (TYPE_RETURNS_STACK_DEPRESSED
922 (TREE_TYPE (current_function_decl))))))
924 else
925 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
928 if (body != 0 && DECL_TOO_LATE (body))
929 error ("jump to `%s' invalidly jumps into binding contour",
930 IDENTIFIER_POINTER (DECL_NAME (body)));
932 /* Label not yet defined: may need to put this goto
933 on the fixup list. */
934 else if (! expand_fixup (body, label, last_insn))
936 /* No fixup needed. Record that the label is the target
937 of at least one goto that has no fixup. */
938 if (body != 0)
939 TREE_ADDRESSABLE (body) = 1;
942 emit_jump (label);
945 /* Generate if necessary a fixup for a goto
946 whose target label in tree structure (if any) is TREE_LABEL
947 and whose target in rtl is RTL_LABEL.
949 If LAST_INSN is nonzero, we pretend that the jump appears
950 after insn LAST_INSN instead of at the current point in the insn stream.
952 The fixup will be used later to insert insns just before the goto.
953 Those insns will restore the stack level as appropriate for the
954 target label, and will (in the case of C++) also invoke any object
955 destructors which have to be invoked when we exit the scopes which
956 are exited by the goto.
958 Value is nonzero if a fixup is made. */
960 static int
961 expand_fixup (tree_label, rtl_label, last_insn)
962 tree tree_label;
963 rtx rtl_label;
964 rtx last_insn;
966 struct nesting *block, *end_block;
968 /* See if we can recognize which block the label will be output in.
969 This is possible in some very common cases.
970 If we succeed, set END_BLOCK to that block.
971 Otherwise, set it to 0. */
973 if (cond_stack
974 && (rtl_label == cond_stack->data.cond.endif_label
975 || rtl_label == cond_stack->data.cond.next_label))
976 end_block = cond_stack;
977 /* If we are in a loop, recognize certain labels which
978 are likely targets. This reduces the number of fixups
979 we need to create. */
980 else if (loop_stack
981 && (rtl_label == loop_stack->data.loop.start_label
982 || rtl_label == loop_stack->data.loop.end_label
983 || rtl_label == loop_stack->data.loop.continue_label))
984 end_block = loop_stack;
985 else
986 end_block = 0;
988 /* Now set END_BLOCK to the binding level to which we will return. */
990 if (end_block)
992 struct nesting *next_block = end_block->all;
993 block = block_stack;
995 /* First see if the END_BLOCK is inside the innermost binding level.
996 If so, then no cleanups or stack levels are relevant. */
997 while (next_block && next_block != block)
998 next_block = next_block->all;
1000 if (next_block)
1001 return 0;
1003 /* Otherwise, set END_BLOCK to the innermost binding level
1004 which is outside the relevant control-structure nesting. */
1005 next_block = block_stack->next;
1006 for (block = block_stack; block != end_block; block = block->all)
1007 if (block == next_block)
1008 next_block = next_block->next;
1009 end_block = next_block;
1012 /* Does any containing block have a stack level or cleanups?
1013 If not, no fixup is needed, and that is the normal case
1014 (the only case, for standard C). */
1015 for (block = block_stack; block != end_block; block = block->next)
1016 if (block->data.block.stack_level != 0
1017 || block->data.block.cleanups != 0)
1018 break;
1020 if (block != end_block)
1022 /* Ok, a fixup is needed. Add a fixup to the list of such. */
1023 struct goto_fixup *fixup
1024 = (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
1025 /* In case an old stack level is restored, make sure that comes
1026 after any pending stack adjust. */
1027 /* ?? If the fixup isn't to come at the present position,
1028 doing the stack adjust here isn't useful. Doing it with our
1029 settings at that location isn't useful either. Let's hope
1030 someone does it! */
1031 if (last_insn == 0)
1032 do_pending_stack_adjust ();
1033 fixup->target = tree_label;
1034 fixup->target_rtl = rtl_label;
1036 /* Create a BLOCK node and a corresponding matched set of
1037 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
1038 this point. The notes will encapsulate any and all fixup
1039 code which we might later insert at this point in the insn
1040 stream. Also, the BLOCK node will be the parent (i.e. the
1041 `SUPERBLOCK') of any other BLOCK nodes which we might create
1042 later on when we are expanding the fixup code.
1044 Note that optimization passes (including expand_end_loop)
1045 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
1046 as a placeholder. */
1049 rtx original_before_jump
1050 = last_insn ? last_insn : get_last_insn ();
1051 rtx start;
1052 rtx end;
1053 tree block;
1055 block = make_node (BLOCK);
1056 TREE_USED (block) = 1;
1058 if (!cfun->x_whole_function_mode_p)
1059 insert_block (block);
1060 else
1062 BLOCK_CHAIN (block)
1063 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
1064 BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
1065 = block;
1068 start_sequence ();
1069 start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
1070 if (cfun->x_whole_function_mode_p)
1071 NOTE_BLOCK (start) = block;
1072 fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
1073 end = emit_note (NULL, NOTE_INSN_BLOCK_END);
1074 if (cfun->x_whole_function_mode_p)
1075 NOTE_BLOCK (end) = block;
1076 fixup->context = block;
1077 end_sequence ();
1078 emit_insns_after (start, original_before_jump);
1081 fixup->block_start_count = current_block_start_count;
1082 fixup->stack_level = 0;
1083 fixup->cleanup_list_list
1084 = ((block->data.block.outer_cleanups
1085 || block->data.block.cleanups)
1086 ? tree_cons (NULL_TREE, block->data.block.cleanups,
1087 block->data.block.outer_cleanups)
1088 : 0);
1089 fixup->next = goto_fixup_chain;
1090 goto_fixup_chain = fixup;
1093 return block != 0;
1096 /* Expand any needed fixups in the outermost binding level of the
1097 function. FIRST_INSN is the first insn in the function. */
1099 void
1100 expand_fixups (first_insn)
1101 rtx first_insn;
1103 fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
1106 /* When exiting a binding contour, process all pending gotos requiring fixups.
1107 THISBLOCK is the structure that describes the block being exited.
1108 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1109 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1110 FIRST_INSN is the insn that began this contour.
1112 Gotos that jump out of this contour must restore the
1113 stack level and do the cleanups before actually jumping.
1115 DONT_JUMP_IN nonzero means report an error if there is a jump into this
1116 contour from before the beginning of the contour.
1117 This is also done if STACK_LEVEL is nonzero. */
1119 static void
1120 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1121 struct nesting *thisblock;
1122 rtx stack_level;
1123 tree cleanup_list;
1124 rtx first_insn;
1125 int dont_jump_in;
1127 struct goto_fixup *f, *prev;
1129 /* F is the fixup we are considering; PREV is the previous one. */
1130 /* We run this loop in two passes so that cleanups of exited blocks
1131 are run first, and blocks that are exited are marked so
1132 afterwards. */
1134 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1136 /* Test for a fixup that is inactive because it is already handled. */
1137 if (f->before_jump == 0)
1139 /* Delete inactive fixup from the chain, if that is easy to do. */
1140 if (prev != 0)
1141 prev->next = f->next;
1143 /* Has this fixup's target label been defined?
1144 If so, we can finalize it. */
1145 else if (PREV_INSN (f->target_rtl) != 0)
1147 rtx cleanup_insns;
1149 /* If this fixup jumped into this contour from before the beginning
1150 of this contour, report an error. This code used to use
1151 the first non-label insn after f->target_rtl, but that's
1152 wrong since such can be added, by things like put_var_into_stack
1153 and have INSN_UIDs that are out of the range of the block. */
1154 /* ??? Bug: this does not detect jumping in through intermediate
1155 blocks that have stack levels or cleanups.
1156 It detects only a problem with the innermost block
1157 around the label. */
1158 if (f->target != 0
1159 && (dont_jump_in || stack_level || cleanup_list)
1160 && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
1161 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
1162 && ! DECL_ERROR_ISSUED (f->target))
1164 error_with_decl (f->target,
1165 "label `%s' used before containing binding contour");
1166 /* Prevent multiple errors for one label. */
1167 DECL_ERROR_ISSUED (f->target) = 1;
1170 /* We will expand the cleanups into a sequence of their own and
1171 then later on we will attach this new sequence to the insn
1172 stream just ahead of the actual jump insn. */
1174 start_sequence ();
1176 /* Temporarily restore the lexical context where we will
1177 logically be inserting the fixup code. We do this for the
1178 sake of getting the debugging information right. */
1180 pushlevel (0);
1181 set_block (f->context);
1183 /* Expand the cleanups for blocks this jump exits. */
1184 if (f->cleanup_list_list)
1186 tree lists;
1187 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1188 /* Marked elements correspond to blocks that have been closed.
1189 Do their cleanups. */
1190 if (TREE_ADDRESSABLE (lists)
1191 && TREE_VALUE (lists) != 0)
1193 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1194 /* Pop any pushes done in the cleanups,
1195 in case function is about to return. */
1196 do_pending_stack_adjust ();
1200 /* Restore stack level for the biggest contour that this
1201 jump jumps out of. */
1202 if (f->stack_level
1203 && ! (f->target_rtl == return_label
1204 && ((TREE_CODE (TREE_TYPE (current_function_decl))
1205 == FUNCTION_TYPE)
1206 && (TYPE_RETURNS_STACK_DEPRESSED
1207 (TREE_TYPE (current_function_decl))))))
1208 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1210 /* Finish up the sequence containing the insns which implement the
1211 necessary cleanups, and then attach that whole sequence to the
1212 insn stream just ahead of the actual jump insn. Attaching it
1213 at that point ensures that any cleanups which are in fact
1214 implicit C++ object destructions (which must be executed upon
1215 leaving the block) appear (to the debugger) to be taking place
1216 in an area of the generated code where the object(s) being
1217 destructed are still "in scope". */
1219 cleanup_insns = get_insns ();
1220 poplevel (1, 0, 0);
1222 end_sequence ();
1223 emit_insns_after (cleanup_insns, f->before_jump);
1225 f->before_jump = 0;
1229 /* For any still-undefined labels, do the cleanups for this block now.
1230 We must do this now since items in the cleanup list may go out
1231 of scope when the block ends. */
1232 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1233 if (f->before_jump != 0
1234 && PREV_INSN (f->target_rtl) == 0
1235 /* Label has still not appeared. If we are exiting a block with
1236 a stack level to restore, that started before the fixup,
1237 mark this stack level as needing restoration
1238 when the fixup is later finalized. */
1239 && thisblock != 0
1240 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
1241 means the label is undefined. That's erroneous, but possible. */
1242 && (thisblock->data.block.block_start_count
1243 <= f->block_start_count))
1245 tree lists = f->cleanup_list_list;
1246 rtx cleanup_insns;
1248 for (; lists; lists = TREE_CHAIN (lists))
1249 /* If the following element corresponds to our containing block
1250 then this element must be for this block. */
1251 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1253 start_sequence ();
1254 pushlevel (0);
1255 set_block (f->context);
1256 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1257 do_pending_stack_adjust ();
1258 cleanup_insns = get_insns ();
1259 poplevel (1, 0, 0);
1260 end_sequence ();
1261 if (cleanup_insns != 0)
1262 f->before_jump
1263 = emit_insns_after (cleanup_insns, f->before_jump);
1265 f->cleanup_list_list = TREE_CHAIN (lists);
1268 if (stack_level)
1269 f->stack_level = stack_level;
1273 /* Return the number of times character C occurs in string S. */
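/* E.g. n_occurrences ('%', "%d%%") == 3.  */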
1274 static int
1275 n_occurrences (c, s)
1276 int c;
1277 const char *s;
1279 int n = 0;
1280 while (*s)
1281 n += (*s++ == c);
1282 return n;
1285 /* Generate RTL for an asm statement (explicit assembler code).
1286 BODY is a STRING_CST node containing the assembler code text,
1287 or an ADDR_EXPR containing a STRING_CST. */
1289 void
1290 expand_asm (body)
1291 tree body;
1293 if (current_function_check_memory_usage)
1295 error ("`asm' cannot be used in function where memory usage is checked");
1296 return;
1299 if (TREE_CODE (body) == ADDR_EXPR)
1300 body = TREE_OPERAND (body, 0);
1302 emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
1303 TREE_STRING_POINTER (body)));
1304 last_expr_type = 0;
1307 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
1308 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
1309 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
1310 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
1311 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
1312 constraint allows the use of a register operand. And, *IS_INOUT
1313 will be true if the operand is read-write, i.e., if it is used as
1314 an input as well as an output. If *CONSTRAINT_P is not in
1315 canonical form, it will be made canonical. (Note that `+' will be
1316 replaced with `=' as part of this process.)
1318 Returns TRUE if all went well; FALSE if an error occurred. */
1320 bool
1321 parse_output_constraint (constraint_p,
1322 operand_num,
1323 ninputs,
1324 noutputs,
1325 allows_mem,
1326 allows_reg,
1327 is_inout)
1328 const char **constraint_p;
1329 int operand_num;
1330 int ninputs;
1331 int noutputs;
1332 bool *allows_mem;
1333 bool *allows_reg;
1334 bool *is_inout;
1336 const char *constraint = *constraint_p;
1337 const char *p;
1339 /* Assume the constraint doesn't allow the use of either a register
1340 or memory. */
1341 *allows_mem = false;
1342 *allows_reg = false;
1344 /* Allow the `=' or `+' to not be at the beginning of the string,
1345 since it wasn't explicitly documented that way, and there is a
1346 large body of code that puts it last. Swap the character to
1347 the front, so as not to uglify any place else. */
1348 p = strchr (constraint, '=');
1349 if (!p)
1350 p = strchr (constraint, '+');
1352 /* If the string doesn't contain an `=', issue an error
1353 message. */
1354 if (!p)
1356 error ("output operand constraint lacks `='");
1357 return false;
1360 /* If the constraint begins with `+', then the operand is both read
1361 from and written to. */
1362 *is_inout = (*p == '+');
1364 /* Canonicalize the output constraint so that it begins with `='. */
1365 if (p != constraint || is_inout)
1367 char *buf;
1368 size_t c_len = strlen (constraint);
1370 if (p != constraint)
1371 warning ("output constraint `%c' for operand %d is not at the beginning",
1372 *p, operand_num);
1374 /* Make a copy of the constraint. */
1375 buf = alloca (c_len + 1);
1376 strcpy (buf, constraint);
1377 /* Swap the first character and the `=' or `+'. */
1378 buf[p - constraint] = buf[0];
1379 /* Make sure the first character is an `='. (Until we do this,
1380 it might be a `+'.) */
1381 buf[0] = '=';
1382 /* Replace the constraint with the canonicalized string. */
1383 *constraint_p = ggc_alloc_string (buf, c_len);
1384 constraint = *constraint_p;
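/* E.g. the non-canonical constraint "r=" becomes "=r" here, and
   "+r" becomes "=r" with *IS_INOUT set.  (Illustrative note.)  */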
1387 /* Loop through the constraint string. */
1388 for (p = constraint + 1; *p; ++p)
1389 switch (*p)
1391 case '+':
1392 case '=':
1393 error ("operand constraint contains '+' or '=' at illegal position.");
1394 return false;
1396 case '%':
1397 if (operand_num + 1 == ninputs + noutputs)
1399 error ("`%%' constraint used with last operand");
1400 return false;
1402 break;
1404 case 'V': case 'm': case 'o':
1405 *allows_mem = true;
1406 break;
1408 case '?': case '!': case '*': case '&': case '#':
1409 case 'E': case 'F': case 'G': case 'H':
1410 case 's': case 'i': case 'n':
1411 case 'I': case 'J': case 'K': case 'L': case 'M':
1412 case 'N': case 'O': case 'P': case ',':
1413 break;
1415 case '0': case '1': case '2': case '3': case '4':
1416 case '5': case '6': case '7': case '8': case '9':
1417 case '[':
1418 error ("matching constraint not valid in output operand");
1419 return false;
1421 case '<': case '>':
1422 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1423 excepting those that expand_call created. So match memory
1424 and hope. */
1425 *allows_mem = true;
1426 break;
1428 case 'g': case 'X':
1429 *allows_reg = true;
1430 *allows_mem = true;
1431 break;
1433 case 'p': case 'r':
1434 *allows_reg = true;
1435 break;
1437 default:
1438 if (!ISALPHA (*p))
1439 break;
1440 if (REG_CLASS_FROM_LETTER (*p) != NO_REGS)
1441 *allows_reg = true;
1442 #ifdef EXTRA_CONSTRAINT
1443 else
1445 /* Otherwise we can't assume anything about the nature of
1446 the constraint except that it isn't purely registers.
1447 Treat it like "g" and hope for the best. */
1448 *allows_reg = true;
1449 *allows_mem = true;
1451 #endif
1452 break;
1455 return true;
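/* A typical call, mirroring the use in expand_asm_operands below:

       bool allows_mem, allows_reg, is_inout;
       if (!parse_output_constraint (&constraints[i], i, ninputs,
                                     noutputs, &allows_mem,
                                     &allows_reg, &is_inout))
         return;
*/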
1458 /* Generate RTL for an asm statement with arguments.
1459 STRING is the instruction template.
1460 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1461 Each output or input has an expression in the TREE_VALUE
1462 and a tree list in TREE_PURPOSE which in turn contains a constraint
1463 name in TREE_VALUE (or NULL_TREE) and a constraint string
1464 in TREE_PURPOSE.
1465 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1466 that is clobbered by this insn.
1468 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1469 Some elements of OUTPUTS may be replaced with trees representing temporary
1470 values. The caller should copy those temporary values to the originally
1471 specified lvalues.
1473 VOL nonzero means the insn is volatile; don't optimize it. */
1475 void
1476 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1477 tree string, outputs, inputs, clobbers;
1478 int vol;
1479 const char *filename;
1480 int line;
1482 rtvec argvec, constraintvec;
1483 rtx body;
1484 int ninputs = list_length (inputs);
1485 int noutputs = list_length (outputs);
1486 int ninout = 0;
1487 int nclobbers;
1488 tree tail;
1489 int i;
1490 /* Vector of RTX's of evaluated output operands. */
1491 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1492 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1493 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1494 enum machine_mode *inout_mode
1495 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1496 const char **constraints
1497 = (const char **) alloca ((noutputs + ninputs) * sizeof (const char *));
1498 /* The insn we have emitted. */
1499 rtx insn;
1500 int old_generating_concat_p = generating_concat_p;
1502 /* An ASM with no outputs needs to be treated as volatile, for now. */
1503 if (noutputs == 0)
1504 vol = 1;
1506 if (current_function_check_memory_usage)
1508 error ("`asm' cannot be used in function where memory usage is checked");
1509 return;
1512 if (! check_operand_nalternatives (outputs, inputs))
1513 return;
1515 if (! check_unique_operand_names (outputs, inputs))
1516 return;
1518 string = resolve_operand_names (string, outputs, inputs, constraints);
1520 #ifdef MD_ASM_CLOBBERS
1521 /* Sometimes we wish to automatically clobber registers across an asm.
1522 Case in point is when the i386 backend moved from cc0 to a hard reg --
1523 maintaining source-level compatibility means automatically clobbering
1524 the flags register. */
1525 MD_ASM_CLOBBERS (clobbers);
1526 #endif
1528 /* Count the number of meaningful clobbered registers, ignoring what
1529 we would ignore later. */
1530 nclobbers = 0;
1531 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1533 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1535 i = decode_reg_name (regname);
1536 if (i >= 0 || i == -4)
1537 ++nclobbers;
1538 else if (i == -2)
1539 error ("unknown register name `%s' in `asm'", regname);
1542 last_expr_type = 0;
1544 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1546 tree val = TREE_VALUE (tail);
1547 tree type = TREE_TYPE (val);
1548 bool is_inout;
1549 bool allows_reg;
1550 bool allows_mem;
1552 /* If there's an erroneous arg, emit no insn. */
1553 if (type == error_mark_node)
1554 return;
1556 /* Make sure constraint has `=' and does not have `+'. Also, see
1557 if it allows any register. Be liberal on the latter test, since
1558 the worst that happens if we get it wrong is we issue an error
1559 message. */
1561 /* Try to parse the output constraint. If that fails, there's
1562 no point in going further. */
1563 if (!parse_output_constraint (&constraints[i],
1564 i,
1565 ninputs,
1566 noutputs,
1567 &allows_mem,
1568 &allows_reg,
1569 &is_inout))
1570 return;
1572 /* If an output operand is not a decl or indirect ref and our constraint
1573 allows a register, make a temporary to act as an intermediate.
1574 Make the asm insn write into that, then our caller will copy it to
1575 the real output operand. Likewise for promoted variables. */
1577 generating_concat_p = 0;
1579 real_output_rtx[i] = NULL_RTX;
1580 if ((TREE_CODE (val) == INDIRECT_REF
1581 && allows_mem)
1582 || (DECL_P (val)
1583 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1584 && ! (GET_CODE (DECL_RTL (val)) == REG
1585 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1586 || ! allows_reg
1587 || is_inout)
1589 if (! allows_reg)
1590 mark_addressable (TREE_VALUE (tail));
1592 output_rtx[i]
1593 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
1594 EXPAND_MEMORY_USE_WO);
1596 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1597 error ("output number %d not directly addressable", i);
1598 if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
1599 || GET_CODE (output_rtx[i]) == CONCAT)
1601 real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1602 output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
1603 if (is_inout)
1604 emit_move_insn (output_rtx[i], real_output_rtx[i]);
1607 else
1609 output_rtx[i] = assign_temp (type, 0, 0, 1);
1610 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1613 generating_concat_p = old_generating_concat_p;
1615 if (is_inout)
1617 inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
1618 inout_opnum[ninout++] = i;
1622 ninputs += ninout;
1623 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1625 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1626 return;
1629 /* Make vectors for the expression-rtx, constraint strings,
1630 and named operands. */
1632 argvec = rtvec_alloc (ninputs);
1633 constraintvec = rtvec_alloc (ninputs);
1635 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1636 : GET_MODE (output_rtx[0])),
1637 TREE_STRING_POINTER (string),
1638 empty_string, 0, argvec, constraintvec,
1639 filename, line);
1641 MEM_VOLATILE_P (body) = vol;
1643 /* Eval the inputs and put them into ARGVEC.
1644 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1646 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1648 int j;
1649 int allows_reg = 0, allows_mem = 0;
1650 const char *constraint, *orig_constraint;
1651 int c_len;
1652 rtx op;
1654 /* If there's an erroneous arg, emit no insn,
1655 because the ASM_INPUT would get VOIDmode
1656 and that could cause a crash in reload. */
1657 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1658 return;
1660 /* ??? Can this happen, and does the error message make any sense? */
1661 if (TREE_PURPOSE (tail) == NULL_TREE)
1663 error ("hard register `%s' listed as input operand to `asm'",
1664 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1665 return;
1668 orig_constraint = constraint = constraints[i + noutputs];
1669 c_len = strlen (constraint);
1671 /* Make sure constraint has neither `=', `+', nor '&'. */
1673 for (j = 0; j < c_len; j++)
1674 switch (constraint[j])
1676 case '+': case '=': case '&':
1677 if (constraint == orig_constraint)
1679 error ("input operand constraint contains `%c'",
1680 constraint[j]);
1681 return;
1683 break;
1685 case '%':
1686 if (constraint == orig_constraint
1687 && i + 1 == ninputs - ninout)
1689 error ("`%%' constraint used with last operand");
1690 return;
1692 break;
1694 case 'V': case 'm': case 'o':
1695 allows_mem = 1;
1696 break;
1698 case '<': case '>':
1699 case '?': case '!': case '*': case '#':
1700 case 'E': case 'F': case 'G': case 'H':
1701 case 's': case 'i': case 'n':
1702 case 'I': case 'J': case 'K': case 'L': case 'M':
1703 case 'N': case 'O': case 'P': case ',':
1704 break;
1706 /* Whether or not a numeric constraint allows a register is
1707 decided by the matching constraint, and so there is no need
1708 to do anything special with them. We must handle them in
1709 the default case, so that we don't unnecessarily force
1710 operands to memory. */
1711 case '0': case '1': case '2': case '3': case '4':
1712 case '5': case '6': case '7': case '8': case '9':
1714 char *end;
1715 unsigned long match;
1717 match = strtoul (constraint + j, &end, 10);
1718 if (match >= (unsigned long) noutputs)
1720 error ("matching constraint references invalid operand number");
1721 return;
1724 /* Try and find the real constraint for this dup. Only do
1725 this if the matching constraint is the only alternative. */
1726 if (*end == '\0'
1727 && (j == 0 || (j == 1 && constraint[0] == '%')))
1729 constraint = constraints[match];
1730 c_len = strlen (constraint);
1731 j = 0;
1732 break;
1734 else
1735 j = end - constraint;
1737 /* Fall through. */
1739 case 'p': case 'r':
1740 allows_reg = 1;
1741 break;
1743 case 'g': case 'X':
1744 allows_reg = 1;
1745 allows_mem = 1;
1746 break;
1748 default:
1749 if (! ISALPHA (constraint[j]))
1751 error ("invalid punctuation `%c' in constraint",
1752 constraint[j]);
1753 return;
1755 if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
1756 allows_reg = 1;
1757 #ifdef EXTRA_CONSTRAINT
1758 else
1760 /* Otherwise we can't assume anything about the nature of
1761 the constraint except that it isn't purely registers.
1762 Treat it like "g" and hope for the best. */
1763 allows_reg = 1;
1764 allows_mem = 1;
1766 #endif
1767 break;
1770 if (! allows_reg && allows_mem)
1771 mark_addressable (TREE_VALUE (tail));
1773 op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
1775 /* Never pass a CONCAT to an ASM. */
1776 generating_concat_p = 0;
1777 if (GET_CODE (op) == CONCAT)
1778 op = force_reg (GET_MODE (op), op);
1780 if (asm_operand_ok (op, constraint) <= 0)
1782 if (allows_reg)
1783 op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
1784 else if (!allows_mem)
1785 warning ("asm operand %d probably doesn't match constraints",
1786 i + noutputs);
1787 else if (CONSTANT_P (op))
1788 op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1789 op);
1790 else if (GET_CODE (op) == REG
1791 || GET_CODE (op) == SUBREG
1792 || GET_CODE (op) == ADDRESSOF
1793 || GET_CODE (op) == CONCAT)
1795 tree type = TREE_TYPE (TREE_VALUE (tail));
1796 tree qual_type = build_qualified_type (type,
1797 (TYPE_QUALS (type)
1798 | TYPE_QUAL_CONST));
1799 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1801 emit_move_insn (memloc, op);
1802 op = memloc;
1805 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1806 /* We won't recognize volatile memory as a
1807 memory_operand at this point. Ignore it. */
1809 else if (queued_subexp_p (op))
1811 else
1812 /* ??? Leave this only until we have experience with what
1813 happens in combine and elsewhere when constraints are
1814 not satisfied. */
1815 warning ("asm operand %d probably doesn't match constraints",
1816 i + noutputs);
1818 generating_concat_p = old_generating_concat_p;
1819 ASM_OPERANDS_INPUT (body, i) = op;
1821 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1822 = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1823 orig_constraint);
1826 /* Protect all the operands from the queue now that they have all been
1827 evaluated. */
1829 generating_concat_p = 0;
1831 for (i = 0; i < ninputs - ninout; i++)
1832 ASM_OPERANDS_INPUT (body, i)
1833 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1835 for (i = 0; i < noutputs; i++)
1836 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1838 /* For in-out operands, copy output rtx to input rtx. */
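/* E.g., an output written "+r" (x) was split earlier into output 0
   with constraint "=r" plus one extra input; here that input receives
   output 0's rtx and the matching-constraint string "0" built with
   sprintf below (an illustrative sketch). */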
1839 for (i = 0; i < ninout; i++)
1841 int j = inout_opnum[i];
1842 char buffer[16];
1844 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1845 = output_rtx[j];
1847 sprintf (buffer, "%d", j);
1848 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1849 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_alloc_string (buffer, -1));
1852 generating_concat_p = old_generating_concat_p;
1854 /* Now, for each output, construct an rtx
1855 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1856 ARGVEC CONSTRAINTS OPNAMES))
1857 If there is more than one, put them inside a PARALLEL. */
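/* Roughly, two outputs plus one register clobber become
   (parallel [(set (reg X) (asm_operands ...))
              (set (reg Y) (asm_operands ...))
              (clobber (reg:QI CLOBBERED-REG))])
   -- an illustrative sketch, not the exact RTL. */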
1859 if (noutputs == 1 && nclobbers == 0)
1861 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1862 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1865 else if (noutputs == 0 && nclobbers == 0)
1867 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1868 insn = emit_insn (body);
1871 else
1873 rtx obody = body;
1874 int num = noutputs;
1876 if (num == 0)
1877 num = 1;
1879 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1881 /* For each output operand, store a SET. */
1882 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1884 XVECEXP (body, 0, i)
1885 = gen_rtx_SET (VOIDmode,
1886 output_rtx[i],
1887 gen_rtx_ASM_OPERANDS
1888 (GET_MODE (output_rtx[i]),
1889 TREE_STRING_POINTER (string),
1890 constraints[i], i, argvec, constraintvec,
1891 filename, line));
1893 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1896 /* If there are no outputs (but there are some clobbers)
1897 store the bare ASM_OPERANDS into the PARALLEL. */
1899 if (i == 0)
1900 XVECEXP (body, 0, i++) = obody;
1902 /* Store (clobber REG) for each clobbered register specified. */
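/* E.g., for asm volatile ("" : : : "memory"), decode_reg_name
   returns -4 for "memory" and the code below emits
   (clobber (mem:BLK (scratch))) so that no memory value is cached
   across the asm (illustrative example). */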
1904 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1906 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1907 int j = decode_reg_name (regname);
1909 if (j < 0)
1911 if (j == -3) /* `cc', which is not a register */
1912 continue;
1914 if (j == -4) /* `memory', don't cache memory across asm */
1916 XVECEXP (body, 0, i++)
1917 = gen_rtx_CLOBBER (VOIDmode,
1918 gen_rtx_MEM
1919 (BLKmode,
1920 gen_rtx_SCRATCH (VOIDmode)));
1921 continue;
1924 /* Ignore unknown register, error already signaled. */
1925 continue;
1928 /* Use QImode since that's guaranteed to clobber just one reg. */
1929 XVECEXP (body, 0, i++)
1930 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1933 insn = emit_insn (body);
1936 /* For any outputs that needed reloading into registers, spill them
1937 back to where they belong. */
1938 for (i = 0; i < noutputs; ++i)
1939 if (real_output_rtx[i])
1940 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1942 free_temp_slots ();
1945 /* A subroutine of expand_asm_operands. Check that all operands have
1946 the same number of alternatives. Return true if so. */
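/* E.g., asm ("..." : "=r,m" (x) : "ri,m" (y)) has two alternatives in
   each constraint and passes this check; pairing "=r,m" with a plain
   "r" would be rejected (illustrative example). */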
1948 static bool
1949 check_operand_nalternatives (outputs, inputs)
1950 tree outputs, inputs;
1952 if (outputs || inputs)
1954 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1955 int nalternatives
1956 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1957 tree next = inputs;
1959 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1961 error ("too many alternatives in `asm'");
1962 return false;
1965 tmp = outputs;
1966 while (tmp)
1968 const char *constraint
1969 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1971 if (n_occurrences (',', constraint) != nalternatives)
1973 error ("operand constraints for `asm' differ in number of alternatives");
1974 return false;
1977 if (TREE_CHAIN (tmp))
1978 tmp = TREE_CHAIN (tmp);
1979 else
1980 tmp = next, next = 0;
1984 return true;
1987 /* A subroutine of expand_asm_operands. Check that all operand names
1988 are unique. Return true if so. We rely on the fact that these names
1989 are identifiers, and so have been canonicalized by get_identifier,
1990 so all we need are pointer comparisons. */
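/* E.g., asm ("..." : [val] "=r" (x) : [val] "r" (y)) fails here
   because the name `val' is used twice (illustrative example). */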
1992 static bool
1993 check_unique_operand_names (outputs, inputs)
1994 tree outputs, inputs;
1996 tree i, j;
1998 for (i = outputs; i ; i = TREE_CHAIN (i))
2000 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
2001 if (! i_name)
2002 continue;
2004 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
2005 if (i_name == TREE_PURPOSE (TREE_PURPOSE (j)))
2006 goto failure;
2009 for (i = inputs; i ; i = TREE_CHAIN (i))
2011 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
2012 if (! i_name)
2013 continue;
2015 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
2016 if (i_name == TREE_PURPOSE (TREE_PURPOSE (j)))
2017 goto failure;
2018 for (j = outputs; j ; j = TREE_CHAIN (j))
2019 if (i_name == TREE_PURPOSE (TREE_PURPOSE (j)))
2020 goto failure;
2023 return true;
2025 failure:
2026 error ("duplicate asm operand name '%s'",
2027 IDENTIFIER_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
2028 return false;
2031 /* A subroutine of expand_asm_operands. Resolve the names of the operands
2032 in OUTPUTS and INPUTS to numbers, and replace the name expansions in
2033 STRING and in the constraints with those numbers. */
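/* For instance, given
   asm ("mov %[in],%[out]" : [out] "=r" (x) : [in] "r" (y));
   the template is rewritten to "mov %1,%0", and any [<name>]
   references inside the input constraints are rewritten the same
   way (an illustrative sketch). */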
2035 static tree
2036 resolve_operand_names (string, outputs, inputs, pconstraints)
2037 tree string;
2038 tree outputs, inputs;
2039 const char **pconstraints;
2041 char *buffer = xstrdup (TREE_STRING_POINTER (string));
2042 char *p;
2043 tree t;
2045 /* Assume that we will not need extra space to perform the substitution.
2046 This is because we get to remove '[' and ']', which means we cannot have
2047 a problem until we have more than 999 operands. */
2049 p = buffer;
2050 while ((p = strchr (p, '%')) != NULL)
2052 if (*++p != '[')
2053 continue;
2054 p = resolve_operand_name_1 (p, outputs, inputs);
2057 string = build_string (strlen (buffer), buffer);
2058 free (buffer);
2060 /* Collect output constraints here because it's convenient.
2061 There should be no named operands here; this is verified
2062 in expand_asm_operands. */
2063 for (t = outputs; t ; t = TREE_CHAIN (t), pconstraints++)
2064 *pconstraints = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2066 /* Substitute [<name>] in input constraint strings. */
2067 for (t = inputs; t ; t = TREE_CHAIN (t), pconstraints++)
2069 const char *c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2070 if (strchr (c, '[') == NULL)
2071 *pconstraints = c;
2072 else
2074 p = buffer = xstrdup (c);
2075 while ((p = strchr (p, '[')) != NULL)
2076 p = resolve_operand_name_1 (p, outputs, inputs);
2078 *pconstraints = ggc_alloc_string (buffer, -1);
2079 free (buffer);
2083 return string;
2086 /* A subroutine of resolve_operand_names. P points to the '[' for a
2087 potential named operand of the form [<name>]. In place, replace
2088 the name and brackets with a number. Return a pointer to the
2089 balance of the string after substitution. */
2091 static char *
2092 resolve_operand_name_1 (p, outputs, inputs)
2093 char *p;
2094 tree outputs, inputs;
2096 char *q;
2097 int op;
2098 tree t;
2099 size_t len;
2101 /* Collect the operand name. */
2102 q = strchr (p, ']');
2103 if (!q)
2105 error ("missing close brace for named operand");
2106 return strchr (p, '\0');
2108 len = q - p - 1;
2110 /* Resolve the name to a number. */
2111 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2113 const char *c = IDENTIFIER_POINTER (TREE_PURPOSE (TREE_PURPOSE (t)));
2114 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2115 goto found;
2117 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2119 const char *c = IDENTIFIER_POINTER (TREE_PURPOSE (TREE_PURPOSE (t)));
2120 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2121 goto found;
2124 *q = '\0';
2125 error ("undefined named operand '%s'", p + 1);
2126 op = 0;
2127 found:
2129 /* Replace the name with the number. Unfortunately, not all libraries
2130 get the return value of sprintf correct, so search for the end of the
2131 generated string by hand. */
2132 sprintf (p, "%d", op);
2133 p = strchr (p, '\0');
2135 /* Verify the no extra buffer space assumption. */
2136 if (p > q)
2137 abort ();
2139 /* Shift the rest of the buffer down to fill the gap. */
2140 memmove (p, q + 1, strlen (q + 1) + 1);
2142 return p;
2145 /* Generate RTL to evaluate the expression EXP
2146 and remember it in case this is the VALUE in a ({... VALUE; }) constr. */
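/* E.g., for int x = ({ f (); 5; }); this is called once for `f ();'
   and once for `5;', whose value becomes the value of the whole
   statement expression (illustrative example). */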
2148 void
2149 expand_expr_stmt (exp)
2150 tree exp;
2152 /* If -W, warn about statements with no side effects,
2153 except for an explicit cast to void (e.g. for assert()), and
2154 except inside a ({...}) where they may be useful. */
2155 if (expr_stmts_for_value == 0 && exp != error_mark_node)
2157 if (! TREE_SIDE_EFFECTS (exp))
2159 if ((extra_warnings || warn_unused_value)
2160 && !(TREE_CODE (exp) == CONVERT_EXPR
2161 && VOID_TYPE_P (TREE_TYPE (exp))))
2162 warning_with_file_and_line (emit_filename, emit_lineno,
2163 "statement with no effect");
2165 else if (warn_unused_value)
2166 warn_if_unused_value (exp);
2169 /* If EXP is of function type and we are expanding statements for
2170 value, convert it to pointer-to-function. */
2171 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2172 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2174 /* The call to `expand_expr' could cause last_expr_type and
2175 last_expr_value to get reset. Therefore, we set last_expr_value
2176 and last_expr_type *after* calling expand_expr. */
2177 last_expr_value = expand_expr (exp,
2178 (expr_stmts_for_value
2179 ? NULL_RTX : const0_rtx),
2180 VOIDmode, 0);
2181 last_expr_type = TREE_TYPE (exp);
2183 /* If all we do is reference a volatile value in memory,
2184 copy it to a register to be sure it is actually touched. */
2185 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
2186 && TREE_THIS_VOLATILE (exp))
2188 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
2190 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
2191 copy_to_reg (last_expr_value);
2192 else
2194 rtx lab = gen_label_rtx ();
2196 /* Compare the value with itself to reference it. */
2197 emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
2198 expand_expr (TYPE_SIZE (last_expr_type),
2199 NULL_RTX, VOIDmode, 0),
2200 BLKmode, 0,
2201 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
2202 lab);
2203 emit_label (lab);
2207 /* If this expression is part of a ({...}) and is in memory, we may have
2208 to preserve temporaries. */
2209 preserve_temp_slots (last_expr_value);
2211 /* Free any temporaries used to evaluate this expression. Any temporary
2212 used as a result of this expression will already have been preserved
2213 above. */
2214 free_temp_slots ();
2216 emit_queue ();
2219 /* Warn if EXP contains any computations whose results are not used.
2220 Return 1 if a warning is printed; 0 otherwise. */
2223 warn_if_unused_value (exp)
2224 tree exp;
2226 if (TREE_USED (exp))
2227 return 0;
2229 /* Don't warn about void constructs. This includes casting to void,
2230 void function calls, and statement expressions with a final cast
2231 to void. */
2232 if (VOID_TYPE_P (TREE_TYPE (exp)))
2233 return 0;
2235 /* If this is an expression with side effects, don't warn. */
2236 if (TREE_SIDE_EFFECTS (exp))
2237 return 0;
2239 switch (TREE_CODE (exp))
2241 case PREINCREMENT_EXPR:
2242 case POSTINCREMENT_EXPR:
2243 case PREDECREMENT_EXPR:
2244 case POSTDECREMENT_EXPR:
2245 case MODIFY_EXPR:
2246 case INIT_EXPR:
2247 case TARGET_EXPR:
2248 case CALL_EXPR:
2249 case METHOD_CALL_EXPR:
2250 case RTL_EXPR:
2251 case TRY_CATCH_EXPR:
2252 case WITH_CLEANUP_EXPR:
2253 case EXIT_EXPR:
2254 return 0;
2256 case BIND_EXPR:
2257 /* For a binding, warn if no side effect within it. */
2258 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2260 case SAVE_EXPR:
2261 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2263 case TRUTH_ORIF_EXPR:
2264 case TRUTH_ANDIF_EXPR:
2265 /* In && or ||, warn if 2nd operand has no side effect. */
2266 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2268 case COMPOUND_EXPR:
2269 if (TREE_NO_UNUSED_WARNING (exp))
2270 return 0;
2271 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2272 return 1;
2273 /* Let people do `(foo (), 0)' without a warning. */
2274 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2275 return 0;
2276 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2278 case NOP_EXPR:
2279 case CONVERT_EXPR:
2280 case NON_LVALUE_EXPR:
2281 /* Don't warn about conversions not explicit in the user's program. */
2282 if (TREE_NO_UNUSED_WARNING (exp))
2283 return 0;
2284 /* Assignment to a cast usually results in a cast of a modify.
2285 Don't complain about that. There can be an arbitrary number of
2286 casts before the modify, so we must loop until we find the first
2287 non-cast expression and then test to see if that is a modify. */
2289 tree tem = TREE_OPERAND (exp, 0);
2291 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2292 tem = TREE_OPERAND (tem, 0);
2294 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2295 || TREE_CODE (tem) == CALL_EXPR)
2296 return 0;
2298 goto warn;
2300 case INDIRECT_REF:
2301 /* Don't warn about automatic dereferencing of references, since
2302 the user cannot control it. */
2303 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2304 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2305 /* Fall through. */
2307 default:
2308 /* Referencing a volatile value is a side effect, so don't warn. */
2309 if ((DECL_P (exp)
2310 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2311 && TREE_THIS_VOLATILE (exp))
2312 return 0;
2314 /* If this is an expression which has no operands, there is no value
2315 to be unused. There are no such language-independent codes,
2316 but front ends may define such. */
2317 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2318 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2319 return 0;
2321 warn:
2322 warning_with_file_and_line (emit_filename, emit_lineno,
2323 "value computed is not used");
2324 return 1;
2328 /* Clear out the memory of the last expression evaluated. */
2330 void
2331 clear_last_expr ()
2333 last_expr_type = 0;
2336 /* Begin a statement which will return a value.
2337 Return the RTL_EXPR for this statement expr.
2338 The caller must save that value and pass it to expand_end_stmt_expr. */
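/* A front end expanding ({ ... VALUE; }) does, roughly:
   tree t = expand_start_stmt_expr ();
   ... expand the statements, the last one for value ...
   t = expand_end_stmt_expr (t);
   (an illustrative sketch of the calling convention). */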
2340 tree
2341 expand_start_stmt_expr ()
2343 tree t;
2345 /* Make the RTL_EXPR node temporary, not momentary,
2346 so that rtl_expr_chain doesn't become garbage. */
2347 t = make_node (RTL_EXPR);
2348 do_pending_stack_adjust ();
2349 start_sequence_for_rtl_expr (t);
2350 NO_DEFER_POP;
2351 expr_stmts_for_value++;
2352 return t;
2355 /* Restore the previous state at the end of a statement that returns a value.
2356 Returns a tree node representing the statement's value and the
2357 insns to compute the value.
2359 The nodes of that expression have been freed by now, so we cannot use them.
2360 But we don't want to do that anyway; the expression has already been
2361 evaluated and now we just want to use the value. So generate an RTL_EXPR
2362 with the proper type and RTL value.
2364 If the last substatement was not an expression,
2365 return something with type `void'. */
2367 tree
2368 expand_end_stmt_expr (t)
2369 tree t;
2371 OK_DEFER_POP;
2373 if (last_expr_type == 0)
2375 last_expr_type = void_type_node;
2376 last_expr_value = const0_rtx;
2378 else if (last_expr_value == 0)
2379 /* There are some cases where this can happen, such as when the
2380 statement is of void type. */
2381 last_expr_value = const0_rtx;
2382 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2383 /* Remove any possible QUEUED. */
2384 last_expr_value = protect_from_queue (last_expr_value, 0);
2386 emit_queue ();
2388 TREE_TYPE (t) = last_expr_type;
2389 RTL_EXPR_RTL (t) = last_expr_value;
2390 RTL_EXPR_SEQUENCE (t) = get_insns ();
2392 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2394 end_sequence ();
2396 /* Don't consider deleting this expr or containing exprs at tree level. */
2397 TREE_SIDE_EFFECTS (t) = 1;
2398 /* Propagate volatility of the actual RTL expr. */
2399 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2401 last_expr_type = 0;
2402 expr_stmts_for_value--;
2404 return t;
2407 /* Generate RTL for the start of an if-then. COND is the expression
2408 whose truth should be tested.
2410 If EXITFLAG is nonzero, this conditional is visible to
2411 `exit_something'. */
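/* For `if (c) A; else B;' a parser calls, in order (illustrative):
   expand_start_cond (c, 0); ... expand A ...
   expand_start_else (); ... expand B ...
   expand_end_cond (); */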
2413 void
2414 expand_start_cond (cond, exitflag)
2415 tree cond;
2416 int exitflag;
2418 struct nesting *thiscond = ALLOC_NESTING ();
2420 /* Make an entry on cond_stack for the cond we are entering. */
2422 thiscond->next = cond_stack;
2423 thiscond->all = nesting_stack;
2424 thiscond->depth = ++nesting_depth;
2425 thiscond->data.cond.next_label = gen_label_rtx ();
2426 /* Before we encounter an `else', we don't need a separate exit label
2427 unless there are supposed to be exit statements
2428 to exit this conditional. */
2429 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2430 thiscond->data.cond.endif_label = thiscond->exit_label;
2431 cond_stack = thiscond;
2432 nesting_stack = thiscond;
2434 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2437 /* Generate RTL between the then-clause and the elseif-clause
2438 of an if-then-elseif-.... */
2440 void
2441 expand_start_elseif (cond)
2442 tree cond;
2444 if (cond_stack->data.cond.endif_label == 0)
2445 cond_stack->data.cond.endif_label = gen_label_rtx ();
2446 emit_jump (cond_stack->data.cond.endif_label);
2447 emit_label (cond_stack->data.cond.next_label);
2448 cond_stack->data.cond.next_label = gen_label_rtx ();
2449 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2452 /* Generate RTL between the then-clause and the else-clause
2453 of an if-then-else. */
2455 void
2456 expand_start_else ()
2458 if (cond_stack->data.cond.endif_label == 0)
2459 cond_stack->data.cond.endif_label = gen_label_rtx ();
2461 emit_jump (cond_stack->data.cond.endif_label);
2462 emit_label (cond_stack->data.cond.next_label);
2463 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2466 /* After calling expand_start_else, turn this "else" into an "else if"
2467 by providing another condition. */
2469 void
2470 expand_elseif (cond)
2471 tree cond;
2473 cond_stack->data.cond.next_label = gen_label_rtx ();
2474 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2477 /* Generate RTL for the end of an if-then.
2478 Pop the record for it off of cond_stack. */
2480 void
2481 expand_end_cond ()
2483 struct nesting *thiscond = cond_stack;
2485 do_pending_stack_adjust ();
2486 if (thiscond->data.cond.next_label)
2487 emit_label (thiscond->data.cond.next_label);
2488 if (thiscond->data.cond.endif_label)
2489 emit_label (thiscond->data.cond.endif_label);
2491 POPSTACK (cond_stack);
2492 last_expr_type = 0;
2495 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2496 loop should be exited by `exit_something'. This is a loop for which
2497 `expand_continue_loop' will jump to the top of the loop.
2499 Make an entry on loop_stack to record the labels associated with
2500 this loop. */
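/* E.g., `while (c) BODY;' can be expanded as (an illustrative sketch):
   expand_start_loop (1);
   expand_exit_loop_if_false (0, c);
   ... expand BODY ...
   expand_end_loop (); */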
2502 struct nesting *
2503 expand_start_loop (exit_flag)
2504 int exit_flag;
2506 struct nesting *thisloop = ALLOC_NESTING ();
2508 /* Make an entry on loop_stack for the loop we are entering. */
2510 thisloop->next = loop_stack;
2511 thisloop->all = nesting_stack;
2512 thisloop->depth = ++nesting_depth;
2513 thisloop->data.loop.start_label = gen_label_rtx ();
2514 thisloop->data.loop.end_label = gen_label_rtx ();
2515 thisloop->data.loop.alt_end_label = 0;
2516 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2517 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2518 loop_stack = thisloop;
2519 nesting_stack = thisloop;
2521 do_pending_stack_adjust ();
2522 emit_queue ();
2523 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2524 emit_label (thisloop->data.loop.start_label);
2526 return thisloop;
2529 /* Like expand_start_loop but for a loop where the continuation point
2530 (for expand_continue_loop) will be specified explicitly. */
2532 struct nesting *
2533 expand_start_loop_continue_elsewhere (exit_flag)
2534 int exit_flag;
2536 struct nesting *thisloop = expand_start_loop (exit_flag);
2537 loop_stack->data.loop.continue_label = gen_label_rtx ();
2538 return thisloop;
2541 /* Begin a null, aka do { } while (0) "loop". But since the contents
2542 of said loop can still contain a break, we must frob the loop nest. */
2544 struct nesting *
2545 expand_start_null_loop ()
2547 struct nesting *thisloop = ALLOC_NESTING ();
2549 /* Make an entry on loop_stack for the loop we are entering. */
2551 thisloop->next = loop_stack;
2552 thisloop->all = nesting_stack;
2553 thisloop->depth = ++nesting_depth;
2554 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2555 thisloop->data.loop.end_label = gen_label_rtx ();
2556 thisloop->data.loop.alt_end_label = NULL_RTX;
2557 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2558 thisloop->exit_label = thisloop->data.loop.end_label;
2559 loop_stack = thisloop;
2560 nesting_stack = thisloop;
2562 return thisloop;
2565 /* Specify the continuation point for a loop started with
2566 expand_start_loop_continue_elsewhere.
2567 Use this at the point in the code to which a continue statement
2568 should jump. */
2570 void
2571 expand_loop_continue_here ()
2573 do_pending_stack_adjust ();
2574 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2575 emit_label (loop_stack->data.loop.continue_label);
2578 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2579 Pop the block off of loop_stack. */
2581 void
2582 expand_end_loop ()
2584 rtx start_label = loop_stack->data.loop.start_label;
2585 rtx insn = get_last_insn ();
2586 int needs_end_jump = 1;
2588 /* Mark the continue-point at the top of the loop if none elsewhere. */
2589 if (start_label == loop_stack->data.loop.continue_label)
2590 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2592 do_pending_stack_adjust ();
2594 /* If optimizing, perhaps reorder the loop.
2595 First, try to use a condjump near the end.
2596 expand_exit_loop_if_false ends loops with unconditional jumps,
2597 like this:
2599 if (test) goto label;
2600 optional: cleanup
2601 goto loop_stack->data.loop.end_label
2602 barrier
2603 label:
2605 If we find such a pattern, we can end the loop earlier. */
2607 if (optimize
2608 && GET_CODE (insn) == CODE_LABEL
2609 && LABEL_NAME (insn) == NULL
2610 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2612 rtx label = insn;
2613 rtx jump = PREV_INSN (PREV_INSN (label));
2615 if (GET_CODE (jump) == JUMP_INSN
2616 && GET_CODE (PATTERN (jump)) == SET
2617 && SET_DEST (PATTERN (jump)) == pc_rtx
2618 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2619 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2620 == loop_stack->data.loop.end_label))
2622 rtx prev;
2624 /* The test might be complex and reference LABEL multiple times,
2625 like the loop in loop_iterations to set vtop. To handle this,
2626 we move LABEL. */
2627 insn = PREV_INSN (label);
2628 reorder_insns (label, label, start_label);
2630 for (prev = PREV_INSN (jump);; prev = PREV_INSN (prev))
2632 /* We ignore line number notes, but if we see any other note,
2633 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2634 NOTE_INSN_LOOP_*, we disable this optimization. */
2635 if (GET_CODE (prev) == NOTE)
2637 if (NOTE_LINE_NUMBER (prev) < 0)
2638 break;
2639 continue;
2641 if (GET_CODE (prev) == CODE_LABEL)
2642 break;
2643 if (GET_CODE (prev) == JUMP_INSN)
2645 if (GET_CODE (PATTERN (prev)) == SET
2646 && SET_DEST (PATTERN (prev)) == pc_rtx
2647 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2648 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2649 == LABEL_REF)
2650 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2652 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2653 = start_label;
2654 emit_note_after (NOTE_INSN_LOOP_END, prev);
2655 needs_end_jump = 0;
2657 break;
2663 /* If the loop starts with a loop exit, roll that to the end where
2664 it will optimize together with the jump back.
2666 We look for the conditional branch to the exit, except that once
2667 we find such a branch, we don't look past 30 instructions.
2669 In more detail, if the loop presently looks like this (in pseudo-C):
2671 start_label:
2672 if (test) goto end_label;
2673 body;
2674 goto start_label;
2675 end_label:
2677 transform it to look like:
2679 goto start_label;
2680 newstart_label:
2681 body;
2682 start_label:
2683 if (test) goto end_label;
2684 goto newstart_label;
2685 end_label:
2687 Here, the `test' may actually consist of some reasonably complex
2688 code, terminating in a test. */
2690 if (optimize
2691 && needs_end_jump
2693 ! (GET_CODE (insn) == JUMP_INSN
2694 && GET_CODE (PATTERN (insn)) == SET
2695 && SET_DEST (PATTERN (insn)) == pc_rtx
2696 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2698 int eh_regions = 0;
2699 int num_insns = 0;
2700 rtx last_test_insn = NULL_RTX;
2702 /* Scan insns from the top of the loop looking for a qualified
2703 conditional exit. */
2704 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2705 insn = NEXT_INSN (insn))
2707 if (GET_CODE (insn) == NOTE)
2709 if (optimize < 2
2710 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2711 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2712 /* The code that actually moves the exit test will
2713 carefully leave BLOCK notes in their original
2714 location. That means, however, that we can't debug
2715 the exit test itself. So, we refuse to move code
2716 containing BLOCK notes at low optimization levels. */
2717 break;
2719 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2720 ++eh_regions;
2721 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2723 --eh_regions;
2724 if (eh_regions < 0)
2725 /* We've come to the end of an EH region, but
2726 never saw the beginning of that region. That
2727 means that an EH region begins before the top
2728 of the loop, and ends in the middle of it. The
2729 existence of such a situation violates a basic
2730 assumption in this code, since that would imply
2731 that even when EH_REGIONS is zero, we might
2732 move code out of an exception region. */
2733 abort ();
2736 /* We must not walk into a nested loop. */
2737 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2738 break;
2740 /* We already know this INSN is a NOTE, so there's no
2741 point in looking at it to see if it's a JUMP. */
2742 continue;
2745 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2746 num_insns++;
2748 if (last_test_insn && num_insns > 30)
2749 break;
2751 if (eh_regions > 0)
2752 /* We don't want to move a partial EH region. Consider:
2754 while ( ( { try {
2755 if (cond ()) 0;
2756 else {
2757 bar();
2758 1;
2759 }
2760 } catch (...) {
2761 1;
2762 } )) {
2763 body;
2764 }
2766 This isn't legal C++, but here's what it's supposed to
2767 mean: if cond() is true, stop looping. Otherwise,
2768 call bar, and keep looping. In addition, if cond
2769 throws an exception, catch it and keep looping. Such
2770 constructs are certainly legal in LISP.
2772 We should not move the `if (cond()) 0' test since then
2773 the EH-region for the try-block would be broken up.
2774 (In this case we would move the EH_BEG note for the `try'
2775 and `if cond()' but not the call to bar() or the
2776 EH_END note.)
2778 So we don't look for tests within an EH region. */
2779 continue;
2781 if (GET_CODE (insn) == JUMP_INSN
2782 && GET_CODE (PATTERN (insn)) == SET
2783 && SET_DEST (PATTERN (insn)) == pc_rtx)
2785 /* This is indeed a jump. */
2786 rtx dest1 = NULL_RTX;
2787 rtx dest2 = NULL_RTX;
2788 rtx potential_last_test;
2789 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2791 /* A conditional jump. */
2792 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2793 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2794 potential_last_test = insn;
2796 else
2798 /* An unconditional jump. */
2799 dest1 = SET_SRC (PATTERN (insn));
2800 /* Include the BARRIER after the JUMP. */
2801 potential_last_test = NEXT_INSN (insn);
2804 do {
2805 if (dest1 && GET_CODE (dest1) == LABEL_REF
2806 && ((XEXP (dest1, 0)
2807 == loop_stack->data.loop.alt_end_label)
2808 || (XEXP (dest1, 0)
2809 == loop_stack->data.loop.end_label)))
2811 last_test_insn = potential_last_test;
2812 break;
2815 /* If this was a conditional jump, there may be
2816 another label at which we should look. */
2817 dest1 = dest2;
2818 dest2 = NULL_RTX;
2819 } while (dest1);
2823 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2825 /* We found one. Move everything from there up
2826 to the end of the loop, and add a jump into the loop
2827 to jump to there. */
2828 rtx newstart_label = gen_label_rtx ();
2829 rtx start_move = start_label;
2830 rtx next_insn;
2832 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2833 then we want to move this note also. */
2834 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2835 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2836 == NOTE_INSN_LOOP_CONT))
2837 start_move = PREV_INSN (start_move);
2839 emit_label_after (newstart_label, PREV_INSN (start_move));
2841 /* Actually move the insns. Start at the beginning, and
2842 keep copying insns until we've copied the
2843 last_test_insn. */
2844 for (insn = start_move; insn; insn = next_insn)
2846 /* Figure out which insn comes after this one. We have
2847 to do this before we move INSN. */
2848 if (insn == last_test_insn)
2849 /* We've moved all the insns. */
2850 next_insn = NULL_RTX;
2851 else
2852 next_insn = NEXT_INSN (insn);
2854 if (GET_CODE (insn) == NOTE
2855 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2856 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2857 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2858 NOTE_INSN_BLOCK_ENDs because the correct generation
2859 of debugging information depends on these appearing
2860 in the same order in the RTL and in the tree
2861 structure, where they are represented as BLOCKs.
2862 So, we don't move block notes. Of course, moving
2863 the code inside the block is likely to make it
2864 impossible to debug the instructions in the exit
2865 test, but such is the price of optimization. */
2866 continue;
2868 /* Move the INSN. */
2869 reorder_insns (insn, insn, get_last_insn ());
2872 emit_jump_insn_after (gen_jump (start_label),
2873 PREV_INSN (newstart_label));
2874 emit_barrier_after (PREV_INSN (newstart_label));
2875 start_label = newstart_label;
2879 if (needs_end_jump)
2881 emit_jump (start_label);
2882 emit_note (NULL, NOTE_INSN_LOOP_END);
2884 emit_label (loop_stack->data.loop.end_label);
2886 POPSTACK (loop_stack);
2888 last_expr_type = 0;
2891 /* Finish a null loop, aka do { } while (0). */
2893 void
2894 expand_end_null_loop ()
2896 do_pending_stack_adjust ();
2897 emit_label (loop_stack->data.loop.end_label);
2899 POPSTACK (loop_stack);
2901 last_expr_type = 0;
2904 /* Generate a jump to the current loop's continue-point.
2905 This is usually the top of the loop, but may be specified
2906 explicitly elsewhere. If not currently inside a loop,
2907 return 0 and do nothing; caller will print an error message. */
2910 expand_continue_loop (whichloop)
2911 struct nesting *whichloop;
2913 last_expr_type = 0;
2914 if (whichloop == 0)
2915 whichloop = loop_stack;
2916 if (whichloop == 0)
2917 return 0;
2918 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2919 NULL_RTX);
2920 return 1;
2923 /* Generate a jump to exit the current loop. If not currently inside a loop,
2924 return 0 and do nothing; caller will print an error message. */
2927 expand_exit_loop (whichloop)
2928 struct nesting *whichloop;
2930 last_expr_type = 0;
2931 if (whichloop == 0)
2932 whichloop = loop_stack;
2933 if (whichloop == 0)
2934 return 0;
2935 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2936 return 1;
2939 /* Generate a conditional jump to exit the current loop if COND
2940 evaluates to zero. If not currently inside a loop,
2941 return 0 and do nothing; caller will print an error message. */
2944 expand_exit_loop_if_false (whichloop, cond)
2945 struct nesting *whichloop;
2946 tree cond;
2948 rtx label = gen_label_rtx ();
2949 rtx last_insn;
2950 last_expr_type = 0;
2952 if (whichloop == 0)
2953 whichloop = loop_stack;
2954 if (whichloop == 0)
2955 return 0;
2956 /* In order to handle fixups, we actually create a conditional jump
2957 around an unconditional branch to exit the loop. If fixups are
2958 necessary, they go before the unconditional branch. */
2960 do_jump (cond, NULL_RTX, label);
2961 last_insn = get_last_insn ();
2962 if (GET_CODE (last_insn) == CODE_LABEL)
2963 whichloop->data.loop.alt_end_label = last_insn;
2964 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2965 NULL_RTX);
2966 emit_label (label);
2968 return 1;
2971 /* Return nonzero if the loop nest is empty. Else return zero. */
2974 stmt_loop_nest_empty ()
2976 /* cfun->stmt can be NULL if we are building a call to get the
2977 EH context for a setjmp/longjmp EH target and the current
2978 function was a deferred inline function. */
2979 return (cfun->stmt == NULL || loop_stack == NULL);
2982 /* Return non-zero if we should preserve sub-expressions as separate
2983 pseudos. We never do so if we aren't optimizing. We always do so
2984 if -fexpensive-optimizations.
2986 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2987 the loop may still be a small one. */
2990 preserve_subexpressions_p ()
2992 rtx insn;
2994 if (flag_expensive_optimizations)
2995 return 1;
2997 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2998 return 0;
3000 insn = get_last_insn_anywhere ();
3002 return (insn
3003 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
3004 < n_non_fixed_regs * 3));
3008 /* Generate a jump to exit the current loop, conditional, binding contour
3009 or case statement. Not all such constructs are visible to this function,
3010 only those started with EXIT_FLAG nonzero. Individual languages use
3011 the EXIT_FLAG parameter to control which kinds of constructs you can
3012 exit this way.
3014 If not currently inside anything that can be exited,
3015 return 0 and do nothing; caller will print an error message. */
3018 expand_exit_something ()
3020 struct nesting *n;
3021 last_expr_type = 0;
3022 for (n = nesting_stack; n; n = n->all)
3023 if (n->exit_label != 0)
3025 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
3026 return 1;
3029 return 0;
3032 /* Generate RTL to return from the current function, with no value.
3033 (That is, we do not do anything about returning any value.) */
3035 void
3036 expand_null_return ()
3038 rtx last_insn = get_last_insn ();
3040 /* If this function was declared to return a value, but we
3041 didn't return one, clobber the return registers so that they are not
3042 propagated live to the rest of the function. */
3043 clobber_return_register ();
3045 expand_null_return_1 (last_insn);
3048 /* Generate RTL to return from the current function, with value VAL. */
3050 static void
3051 expand_value_return (val)
3052 rtx val;
3054 rtx last_insn = get_last_insn ();
3055 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
3057 /* Copy the value to the return location
3058 unless it's already there. */
3060 if (return_reg != val)
3062 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
3063 #ifdef PROMOTE_FUNCTION_RETURN
3064 int unsignedp = TREE_UNSIGNED (type);
3065 enum machine_mode old_mode
3066 = DECL_MODE (DECL_RESULT (current_function_decl));
3067 enum machine_mode mode
3068 = promote_mode (type, old_mode, &unsignedp, 1);
3070 if (mode != old_mode)
3071 val = convert_modes (mode, old_mode, val, unsignedp);
3072 #endif
3073 if (GET_CODE (return_reg) == PARALLEL)
3074 emit_group_load (return_reg, val, int_size_in_bytes (type),
3075 TYPE_ALIGN (type));
3076 else
3077 emit_move_insn (return_reg, val);
3080 expand_null_return_1 (last_insn);
3083 /* Output a return with no value. If LAST_INSN is nonzero,
3084 pretend that the return takes place after LAST_INSN. */
3086 static void
3087 expand_null_return_1 (last_insn)
3088 rtx last_insn;
3090 rtx end_label = cleanup_label ? cleanup_label : return_label;
3092 clear_pending_stack_adjust ();
3093 do_pending_stack_adjust ();
3094 last_expr_type = 0;
3096 if (end_label == 0)
3097 end_label = return_label = gen_label_rtx ();
3098 expand_goto_internal (NULL_TREE, end_label, last_insn);
3101 /* Generate RTL to evaluate the expression RETVAL and return it
3102 from the current function. */
3104 void
3105 expand_return (retval)
3106 tree retval;
3108 /* If there are any cleanups to be performed, then they will
3109 be inserted following LAST_INSN. It is desirable
3110 that the last_insn, for such purposes, should be the
3111 last insn before computing the return value. Otherwise, cleanups
3112 which call functions can clobber the return value. */
3113 /* ??? rms: I think that is erroneous, because in C++ it would
3114 run destructors on variables that might be used in the subsequent
3115 computation of the return value. */
3116 rtx last_insn = 0;
3117 rtx result_rtl;
3118 rtx val = 0;
3119 tree retval_rhs;
3121 /* If function wants no value, give it none. */
3122 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3124 expand_expr (retval, NULL_RTX, VOIDmode, 0);
3125 emit_queue ();
3126 expand_null_return ();
3127 return;
3130 if (retval == error_mark_node)
3132 /* Treat this like a return of no value from a function that
3133 returns a value. */
3134 expand_null_return ();
3135 return;
3137 else if (TREE_CODE (retval) == RESULT_DECL)
3138 retval_rhs = retval;
3139 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3140 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3141 retval_rhs = TREE_OPERAND (retval, 1);
3142 else if (VOID_TYPE_P (TREE_TYPE (retval)))
3143 /* Recognize tail-recursive call to void function. */
3144 retval_rhs = retval;
3145 else
3146 retval_rhs = NULL_TREE;
3148 last_insn = get_last_insn ();
3150 /* Distribute return down conditional expr if either of the sides
3151 may involve tail recursion (see test below). This enhances the number
3152 of tail recursions we see. Don't always do this, since it can produce
3153 sub-optimal code in some cases, and we distribute assignments into
3154 conditional expressions when it would help. */
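/* E.g., `return c ? f (x) : g (y);' is rewritten below as roughly
   `if (c) return f (x); else return g (y);' so that each arm can
   become a tail call (illustrative example). */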
3156 if (optimize && retval_rhs != 0
3157 && frame_offset == 0
3158 && TREE_CODE (retval_rhs) == COND_EXPR
3159 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3160 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3162 rtx label = gen_label_rtx ();
3163 tree expr;
3165 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3166 start_cleanup_deferral ();
3167 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3168 DECL_RESULT (current_function_decl),
3169 TREE_OPERAND (retval_rhs, 1));
3170 TREE_SIDE_EFFECTS (expr) = 1;
3171 expand_return (expr);
3172 emit_label (label);
3174 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3175 DECL_RESULT (current_function_decl),
3176 TREE_OPERAND (retval_rhs, 2));
3177 TREE_SIDE_EFFECTS (expr) = 1;
3178 expand_return (expr);
3179 end_cleanup_deferral ();
3180 return;
3183 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3185 /* If the result is an aggregate that is being returned in one (or more)
3186 registers, load the registers here. The compiler currently can't handle
3187 copying a BLKmode value into registers. We could put this code in a
3188 more general area (for use by everyone instead of just function
3189 call/return), but until this feature is generally usable it is kept here
3190 (and in expand_call). The value must go into a pseudo in case there
3191 are cleanups that will clobber the real return register. */
3193 if (retval_rhs != 0
3194 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3195 && GET_CODE (result_rtl) == REG)
3197 int i;
3198 unsigned HOST_WIDE_INT bitpos, xbitpos;
3199 unsigned HOST_WIDE_INT big_endian_correction = 0;
3200 unsigned HOST_WIDE_INT bytes
3201 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3202 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3203 unsigned int bitsize
3204 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3205 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3206 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3207 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3208 enum machine_mode tmpmode, result_reg_mode;
3210 if (bytes == 0)
3212 expand_null_return ();
3213 return;
3216 /* Structures whose size is not a multiple of a word are aligned
3217 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3218 machine, this means we must skip the empty high order bytes when
3219 calculating the bit offset. */
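/* E.g., with 32-bit words, a 6-byte structure gets
   big_endian_correction = 32 - (6 % 4) * 8 = 16 bits
   (an illustrative calculation). */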
3220 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
3221 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3222 * BITS_PER_UNIT));
3224 /* Copy the structure BITSIZE bits at a time. */
3225 for (bitpos = 0, xbitpos = big_endian_correction;
3226 bitpos < bytes * BITS_PER_UNIT;
3227 bitpos += bitsize, xbitpos += bitsize)
3229 /* We need a new destination pseudo each time xbitpos is
3230 on a word boundary and when xbitpos == big_endian_correction
3231 (the first time through). */
3232 if (xbitpos % BITS_PER_WORD == 0
3233 || xbitpos == big_endian_correction)
3235 /* Generate an appropriate register. */
3236 dst = gen_reg_rtx (word_mode);
3237 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3239 /* Clobber the destination before we move anything into it. */
3240 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
3243 /* We need a new source operand each time bitpos is on a word
3244 boundary. */
3245 if (bitpos % BITS_PER_WORD == 0)
3246 src = operand_subword_force (result_val,
3247 bitpos / BITS_PER_WORD,
3248 BLKmode);
3250 /* Use bitpos for the source extraction (left justified) and
3251 xbitpos for the destination store (right justified). */
3252 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3253 extract_bit_field (src, bitsize,
3254 bitpos % BITS_PER_WORD, 1,
3255 NULL_RTX, word_mode, word_mode,
3256 bitsize, BITS_PER_WORD),
3257 bitsize, BITS_PER_WORD);
3260 /* Find the smallest integer mode large enough to hold the
3261 entire structure and use that mode instead of BLKmode
3262 on the USE insn for the return register. */
3263 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3264 tmpmode != VOIDmode;
3265 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3266 /* Have we found a large enough mode? */
3267 if (GET_MODE_SIZE (tmpmode) >= bytes)
3268 break;
3270 /* No suitable mode found. */
3271 if (tmpmode == VOIDmode)
3272 abort ();
3274 PUT_MODE (result_rtl, tmpmode);
3276 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3277 result_reg_mode = word_mode;
3278 else
3279 result_reg_mode = tmpmode;
3280 result_reg = gen_reg_rtx (result_reg_mode);
3282 emit_queue ();
3283 for (i = 0; i < n_regs; i++)
3284 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3285 result_pseudos[i]);
3287 if (tmpmode != result_reg_mode)
3288 result_reg = gen_lowpart (tmpmode, result_reg);
3290 expand_value_return (result_reg);
3292 else if (retval_rhs != 0
3293 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3294 && (GET_CODE (result_rtl) == REG
3295 || (GET_CODE (result_rtl) == PARALLEL)))
3297 /* Calculate the return value into a temporary (usually a pseudo
3298 reg). */
3299 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3300 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3302 val = assign_temp (nt, 0, 0, 1);
3303 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3304 val = force_not_mem (val);
3305 emit_queue ();
3306 /* Return the calculated value, doing cleanups first. */
3307 expand_value_return (val);
3309 else
3311 /* No cleanups or no hard reg used;
3312 calculate value into hard return reg. */
3313 expand_expr (retval, const0_rtx, VOIDmode, 0);
3314 emit_queue ();
3315 expand_value_return (result_rtl);
3319 /* Return 1 if the end of the generated RTX is not a barrier.
3320 This means code already compiled can drop through. */
3323 drop_through_at_end_p ()
3325 rtx insn = get_last_insn ();
3326 while (insn && GET_CODE (insn) == NOTE)
3327 insn = PREV_INSN (insn);
3328 return insn && GET_CODE (insn) != BARRIER;
3331 /* Attempt to optimize a potential tail recursion call into a goto.
3332 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3333 where to place the jump to the tail recursion label.
3335 Return TRUE if the call was optimized into a goto. */
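/* E.g., the self-call in
   int f (int n) { return n > 0 ? f (n - 1) : 0; }
   can become stores of the new argument values into the formals
   followed by a jump back to tail_recursion_label (an illustrative
   sketch). */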
3338 optimize_tail_recursion (arguments, last_insn)
3339 tree arguments;
3340 rtx last_insn;
3342 /* Finish checking validity, and if valid emit code to set the
3343 argument variables for the new call. */
3344 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3346 if (tail_recursion_label == 0)
3348 tail_recursion_label = gen_label_rtx ();
3349 emit_label_after (tail_recursion_label,
3350 tail_recursion_reentry);
3352 emit_queue ();
3353 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3354 emit_barrier ();
3355 return 1;
3357 return 0;
3360 /* Emit code to alter this function's formal parms for a tail-recursive call.
3361 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3362 FORMALS is the chain of decls of formals.
3363 Return 1 if this can be done;
3364 otherwise return 0 and do not emit any code. */
3366 static int
3367 tail_recursion_args (actuals, formals)
3368 tree actuals, formals;
3370 tree a = actuals, f = formals;
3371 int i;
3372 rtx *argvec;
3374 /* Check that number and types of actuals are compatible
3375 with the formals. This is not always true in valid C code.
3376 Also check that no formal needs to be addressable
3377 and that all formals are scalars. */
3379 /* Also count the args. */
3381 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3383 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3384 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3385 return 0;
3386 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3387 return 0;
3389 if (a != 0 || f != 0)
3390 return 0;
3392 /* Compute all the actuals. */
3394 argvec = (rtx *) alloca (i * sizeof (rtx));
3396 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3397 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3399 /* Find which actual values refer to current values of previous formals.
3400 Copy each of them now, before any formal is changed. */
3402 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3404 int copy = 0;
3405 int j;
3406 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3407 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3409 copy = 1;
3410 break;
3412 if (copy)
3413 argvec[i] = copy_to_reg (argvec[i]);
3416 /* Store the values of the actuals into the formals. */
3418 for (f = formals, a = actuals, i = 0; f;
3419 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3421 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3422 emit_move_insn (DECL_RTL (f), argvec[i]);
3423 else
3424 convert_move (DECL_RTL (f), argvec[i],
3425 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3428 free_temp_slots ();
3429 return 1;
3432 /* Generate the RTL code for entering a binding contour.
3433 The variables are declared one by one, by calls to `expand_decl'.
3435 FLAGS is a bitwise or of the following flags:
3437 1 - Nonzero if this construct should be visible to
3438 `exit_something'.
3440 2 - Nonzero if this contour does not require a
3441 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3442 language-independent code should set this flag because they
3443 will not create corresponding BLOCK nodes. (There should be
3444 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3445 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3446 when expand_end_bindings is called.
3448 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3449 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3450 note. */
3452 void
3453 expand_start_bindings_and_block (flags, block)
3454 int flags;
3455 tree block;
3457 struct nesting *thisblock = ALLOC_NESTING ();
3458 rtx note;
3459 int exit_flag = ((flags & 1) != 0);
3460 int block_flag = ((flags & 2) == 0);
3462 /* If a BLOCK is supplied, then the caller should be requesting a
3463 NOTE_INSN_BLOCK_BEG note. */
3464 if (!block_flag && block)
3465 abort ();
3467 /* Create a note to mark the beginning of the block. */
3468 if (block_flag)
3470 note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
3471 NOTE_BLOCK (note) = block;
3473 else
3474 note = emit_note (NULL, NOTE_INSN_DELETED);
3476 /* Make an entry on block_stack for the block we are entering. */
3478 thisblock->next = block_stack;
3479 thisblock->all = nesting_stack;
3480 thisblock->depth = ++nesting_depth;
3481 thisblock->data.block.stack_level = 0;
3482 thisblock->data.block.cleanups = 0;
3483 thisblock->data.block.n_function_calls = 0;
3484 thisblock->data.block.exception_region = 0;
3485 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3487 thisblock->data.block.conditional_code = 0;
3488 thisblock->data.block.last_unconditional_cleanup = note;
3489 /* When we insert instructions after the last unconditional cleanup,
3490 we don't adjust last_insn. That means that a later add_insn will
3491 clobber the instructions we've just added. The easiest way to
3492 fix this is to just insert another instruction here, so that the
3493 instructions inserted after the last unconditional cleanup are
3494 never the last instruction. */
3495 emit_note (NULL, NOTE_INSN_DELETED);
3496 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3498 if (block_stack
3499 && !(block_stack->data.block.cleanups == NULL_TREE
3500 && block_stack->data.block.outer_cleanups == NULL_TREE))
3501 thisblock->data.block.outer_cleanups
3502 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3503 block_stack->data.block.outer_cleanups);
3504 else
3505 thisblock->data.block.outer_cleanups = 0;
3506 thisblock->data.block.label_chain = 0;
3507 thisblock->data.block.innermost_stack_block = stack_block_stack;
3508 thisblock->data.block.first_insn = note;
3509 thisblock->data.block.block_start_count = ++current_block_start_count;
3510 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3511 block_stack = thisblock;
3512 nesting_stack = thisblock;
3514 /* Make a new level for allocating stack slots. */
3515 push_temp_slots ();
3518 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3519 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3520 expand_expr are made. After we end the region, we know that all
3521 space for all temporaries that were created by TARGET_EXPRs will be
3522 destroyed and their space freed for reuse. */
3524 void
3525 expand_start_target_temps ()
3527 /* This is so that even if the result is preserved, the space
3528 allocated will be freed, as we know that it is no longer in use. */
3529 push_temp_slots ();
3531 /* Start a new binding layer that will keep track of all cleanup
3532 actions to be performed. */
3533 expand_start_bindings (2);
3535 target_temp_slot_level = temp_slot_level;
3538 void
3539 expand_end_target_temps ()
3541 expand_end_bindings (NULL_TREE, 0, 0);
3543 /* This is so that even if the result is preserved, the space
3544 allocated will be freed, as we know that it is no longer in use. */
3545 pop_temp_slots ();
3548 /* Given a pointer to a BLOCK node return non-zero if (and only if) the node
3549 in question represents the outermost pair of curly braces (i.e. the "body
3550 block") of a function or method.
3552 For any BLOCK node representing a "body block" of a function or method, the
3553 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3554 represents the outermost (function) scope for the function or method (i.e.
3555 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3556 *that* node in turn will point to the relevant FUNCTION_DECL node. */
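/* I.e., for `void f (void) { ... }' the chain is, roughly,
   body BLOCK -> outermost (function) BLOCK -> FUNCTION_DECL for f
   (illustrative). */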
3559 is_body_block (stmt)
3560 tree stmt;
3562 if (TREE_CODE (stmt) == BLOCK)
3564 tree parent = BLOCK_SUPERCONTEXT (stmt);
3566 if (parent && TREE_CODE (parent) == BLOCK)
3568 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3570 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3571 return 1;
3575 return 0;
3578 /* True if we are currently emitting insns in an area of output code
3579 that is controlled by a conditional expression. This is used by
3580 the cleanup handling code to generate conditional cleanup actions. */
3583 conditional_context ()
3585 return block_stack && block_stack->data.block.conditional_code;
3588 /* Return an opaque pointer to the current nesting level, so frontend code
3589 can check its own sanity. */
3591 struct nesting *
3592 current_nesting_level ()
3594 return cfun ? block_stack : 0;
3597 /* Emit a handler label for a nonlocal goto handler.
3598 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3600 static rtx
3601 expand_nl_handler_label (slot, before_insn)
3602 rtx slot, before_insn;
3604 rtx insns;
3605 rtx handler_label = gen_label_rtx ();
3607 /* Don't let cleanup_cfg delete the handler. */
3608 LABEL_PRESERVE_P (handler_label) = 1;
3610 start_sequence ();
3611 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3612 insns = get_insns ();
3613 end_sequence ();
3614 emit_insns_before (insns, before_insn);
3616 emit_label (handler_label);
3618 return handler_label;
3621 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3622 handler. */
3623 static void
3624 expand_nl_goto_receiver ()
3626 #ifdef HAVE_nonlocal_goto
3627 if (! HAVE_nonlocal_goto)
3628 #endif
3629 /* First adjust our frame pointer to its actual value. It was
3630 previously set to the start of the virtual area corresponding to
3631 the stacked variables when we branched here and now needs to be
3632 adjusted to the actual hardware fp value.
3634 Assignments to virtual registers are converted by
3635 instantiate_virtual_regs into the corresponding assignment
3636 to the underlying register (fp in this case) that makes
3637 the original assignment true.
3638 So the following insn will actually be
3639 decrementing fp by STARTING_FRAME_OFFSET. */
3640 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3642 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3643 if (fixed_regs[ARG_POINTER_REGNUM])
3645 #ifdef ELIMINABLE_REGS
3646 /* If the argument pointer can be eliminated in favor of the
3647 frame pointer, we don't need to restore it. We assume here
3648 that if such an elimination is present, it can always be used.
3649 This is the case on all known machines; if we don't make this
3650 assumption, we do unnecessary saving on many machines. */
3651 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3652 size_t i;
3654 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3655 if (elim_regs[i].from == ARG_POINTER_REGNUM
3656 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3657 break;
3659 if (i == ARRAY_SIZE (elim_regs))
3660 #endif
3662 /* Now restore our arg pointer from the address at which it
3663 was saved in our stack frame. */
3664 emit_move_insn (virtual_incoming_args_rtx,
3665 copy_to_reg (get_arg_pointer_save_area (cfun)));
3668 #endif
3670 #ifdef HAVE_nonlocal_goto_receiver
3671 if (HAVE_nonlocal_goto_receiver)
3672 emit_insn (gen_nonlocal_goto_receiver ());
3673 #endif
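/* For illustration, an assumed example (not from the sources) of the GNU C
   construct these receivers support: a nested function jumping to a label
   of its containing function.

       void f (void)
       {
         __label__ done;
         void inner (void) { goto done; }
         inner ();
       done:;
       }

   The "goto done" re-enters f's frame at a receiver like the one emitted
   above, which first restores the frame pointer (and, where needed, the
   arg pointer).  */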
3676 /* Make handlers for nonlocal gotos taking place in the function calls in
3677 block THISBLOCK. */
3679 static void
3680 expand_nl_goto_receivers (thisblock)
3681 struct nesting *thisblock;
3683 tree link;
3684 rtx afterward = gen_label_rtx ();
3685 rtx insns, slot;
3686 rtx label_list;
3687 int any_invalid;
3689 /* Record the handler address in the stack slot for that purpose,
3690 during this block, saving and restoring the outer value. */
3691 if (thisblock->next != 0)
3692 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3694 rtx save_receiver = gen_reg_rtx (Pmode);
3695 emit_move_insn (XEXP (slot, 0), save_receiver);
3697 start_sequence ();
3698 emit_move_insn (save_receiver, XEXP (slot, 0));
3699 insns = get_insns ();
3700 end_sequence ();
3701 emit_insns_before (insns, thisblock->data.block.first_insn);
3704 /* Jump around the handlers; they run only when specially invoked. */
3705 emit_jump (afterward);
3707 /* Make a separate handler for each label. */
3708 link = nonlocal_labels;
3709 slot = nonlocal_goto_handler_slots;
3710 label_list = NULL_RTX;
3711 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3712 /* Skip any labels we shouldn't be able to jump to from here;
3713 we generate one special handler for all of them below, which just calls
3714 abort. */
3715 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3717 rtx lab;
3718 lab = expand_nl_handler_label (XEXP (slot, 0),
3719 thisblock->data.block.first_insn);
3720 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3722 expand_nl_goto_receiver ();
3724 /* Jump to the "real" nonlocal label. */
3725 expand_goto (TREE_VALUE (link));
3728 /* A second pass over all nonlocal labels; this time we handle those
3729 we should not be able to jump to at this point. */
3730 link = nonlocal_labels;
3731 slot = nonlocal_goto_handler_slots;
3732 any_invalid = 0;
3733 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3734 if (DECL_TOO_LATE (TREE_VALUE (link)))
3736 rtx lab;
3737 lab = expand_nl_handler_label (XEXP (slot, 0),
3738 thisblock->data.block.first_insn);
3739 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3740 any_invalid = 1;
3743 if (any_invalid)
3745 expand_nl_goto_receiver ();
3746 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3747 VOIDmode, 0);
3748 emit_barrier ();
3751 nonlocal_goto_handler_labels = label_list;
3752 emit_label (afterward);
3755 /* Warn about any unused VARS (which may contain nodes other than
3756 VAR_DECLs, but such nodes are ignored). The nodes are connected
3757 via the TREE_CHAIN field. */
3759 void
3760 warn_about_unused_variables (vars)
3761 tree vars;
3763 tree decl;
3765 if (warn_unused_variable)
3766 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3767 if (TREE_CODE (decl) == VAR_DECL
3768 && ! TREE_USED (decl)
3769 && ! DECL_IN_SYSTEM_HEADER (decl)
3770 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3771 warning_with_decl (decl, "unused variable `%s'");
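/* E.g. (illustrative): compiling "void f (void) { int n; }" with
   -Wunused-variable reaches the warning above for `n', a user-declared
   VAR_DECL that is never marked TREE_USED.  */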
3774 /* Generate RTL code to terminate a binding contour.
3776 VARS is the chain of VAR_DECL nodes for the variables bound in this
3777 contour. There may actually be other nodes in this chain, but any
3778 nodes other than VAR_DECLS are ignored.
3780 MARK_ENDS is nonzero if we should put a note at the beginning
3781 and end of this binding contour.
3783 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3784 (That is true automatically if the contour has a saved stack level.) */
3786 void
3787 expand_end_bindings (vars, mark_ends, dont_jump_in)
3788 tree vars;
3789 int mark_ends;
3790 int dont_jump_in;
3792 struct nesting *thisblock = block_stack;
3794 /* If any of the variables in this scope were not used, warn the
3795 user. */
3796 warn_about_unused_variables (vars);
3798 if (thisblock->exit_label)
3800 do_pending_stack_adjust ();
3801 emit_label (thisblock->exit_label);
3804 /* If necessary, make handlers for nonlocal gotos taking
3805 place in the function calls in this block. */
3806 if (function_call_count != thisblock->data.block.n_function_calls
3807 && nonlocal_labels
3808 /* Make handler for outermost block
3809 if there were any nonlocal gotos to this function. */
3810 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3811 /* Make handler for inner block if it has something
3812 special to do when you jump out of it. */
3813 : (thisblock->data.block.cleanups != 0
3814 || thisblock->data.block.stack_level != 0)))
3815 expand_nl_goto_receivers (thisblock);
3817 /* Don't allow jumping into a block that has a stack level.
3818 Cleanups are allowed, though. */
3819 if (dont_jump_in
3820 || thisblock->data.block.stack_level != 0)
3822 struct label_chain *chain;
3824 /* Any labels in this block are no longer valid to go to.
3825 Mark them to cause an error message. */
3826 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3828 DECL_TOO_LATE (chain->label) = 1;
3829 /* If any goto without a fixup came to this label,
3830 that must be an error, because gotos without fixups
3831 come from outside all saved stack-levels. */
3832 if (TREE_ADDRESSABLE (chain->label))
3833 error_with_decl (chain->label,
3834 "label `%s' used before containing binding contour");
3838 /* Restore stack level in effect before the block
3839 (only if variable-size objects allocated). */
3840 /* Perform any cleanups associated with the block. */
3842 if (thisblock->data.block.stack_level != 0
3843 || thisblock->data.block.cleanups != 0)
3845 int reachable;
3846 rtx insn;
3848 /* Don't let cleanups affect ({...}) constructs. */
3849 int old_expr_stmts_for_value = expr_stmts_for_value;
3850 rtx old_last_expr_value = last_expr_value;
3851 tree old_last_expr_type = last_expr_type;
3852 expr_stmts_for_value = 0;
3854 /* Only clean up here if this point can actually be reached. */
3855 insn = get_last_insn ();
3856 if (GET_CODE (insn) == NOTE)
3857 insn = prev_nonnote_insn (insn);
3858 reachable = (! insn || GET_CODE (insn) != BARRIER);
3860 /* Do the cleanups. */
3861 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3862 if (reachable)
3863 do_pending_stack_adjust ();
3865 expr_stmts_for_value = old_expr_stmts_for_value;
3866 last_expr_value = old_last_expr_value;
3867 last_expr_type = old_last_expr_type;
3869 /* Restore the stack level. */
3871 if (reachable && thisblock->data.block.stack_level != 0)
3873 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3874 thisblock->data.block.stack_level, NULL_RTX);
3875 if (nonlocal_goto_handler_slots != 0)
3876 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3877 NULL_RTX);
3880 /* Any gotos out of this block must also do these things.
3881 Also report any gotos with fixups that came to labels in this
3882 level. */
3883 fixup_gotos (thisblock,
3884 thisblock->data.block.stack_level,
3885 thisblock->data.block.cleanups,
3886 thisblock->data.block.first_insn,
3887 dont_jump_in);
3890 /* Mark the beginning and end of the scope if requested.
3891 We do this now, after running cleanups on the variables
3892 just going out of scope, so they are in scope for their cleanups. */
3894 if (mark_ends)
3896 rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
3897 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3899 else
3900 /* Get rid of the beginning-mark if we don't make an end-mark. */
3901 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3903 /* Restore the temporary level of TARGET_EXPRs. */
3904 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3906 /* Restore block_stack level for containing block. */
3908 stack_block_stack = thisblock->data.block.innermost_stack_block;
3909 POPSTACK (block_stack);
3911 /* Pop the stack slot nesting and free any slots at this level. */
3912 pop_temp_slots ();
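/* For illustration (an assumed example): a contour that exercises the
   stack-level handling above is

       void f (int n) { { char buf[n]; g (buf); } }

   Entering the inner braces records a stack level (see save_stack_pointer
   below); expand_end_bindings restores it here so buf's space is
   reclaimed on exit.  */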
3915 /* Generate code to save the stack pointer at the start of the current block
3916 and set up to restore it on exit. */
3918 void
3919 save_stack_pointer ()
3921 struct nesting *thisblock = block_stack;
3923 if (thisblock->data.block.stack_level == 0)
3925 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3926 &thisblock->data.block.stack_level,
3927 thisblock->data.block.first_insn);
3928 stack_block_stack = thisblock;
3932 /* Generate RTL for the automatic variable declaration DECL.
3933 (Other kinds of declarations are simply ignored if seen here.) */
3935 void
3936 expand_decl (decl)
3937 tree decl;
3939 struct nesting *thisblock;
3940 tree type;
3942 type = TREE_TYPE (decl);
3944 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3945 type in case this node is used in a reference. */
3946 if (TREE_CODE (decl) == CONST_DECL)
3948 DECL_MODE (decl) = TYPE_MODE (type);
3949 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3950 DECL_SIZE (decl) = TYPE_SIZE (type);
3951 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3952 return;
3955 /* Otherwise, only automatic variables need any expansion done. Static and
3956 external variables, and external functions, will be handled by
3957 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3958 nothing. PARM_DECLs are handled in `assign_parms'. */
3959 if (TREE_CODE (decl) != VAR_DECL)
3960 return;
3962 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3963 return;
3965 thisblock = block_stack;
3967 /* Create the RTL representation for the variable. */
3969 if (type == error_mark_node)
3970 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3972 else if (DECL_SIZE (decl) == 0)
3973 /* Variable with incomplete type. */
3975 rtx x;
3976 if (DECL_INITIAL (decl) == 0)
3977 /* Error message was already done; now avoid a crash. */
3978 x = gen_rtx_MEM (BLKmode, const0_rtx);
3979 else
3980 /* An initializer is going to decide the size of this array.
3981 Until we know the size, represent its address with a reg. */
3982 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3984 set_mem_attributes (x, decl, 1);
3985 SET_DECL_RTL (decl, x);
3987 else if (DECL_MODE (decl) != BLKmode
3988 /* If -ffloat-store, don't put explicit float vars
3989 into regs. */
3990 && !(flag_float_store
3991 && TREE_CODE (type) == REAL_TYPE)
3992 && ! TREE_THIS_VOLATILE (decl)
3993 && (DECL_REGISTER (decl) || optimize)
3994 /* If -fcheck-memory-usage, check all variables. */
3995 && ! current_function_check_memory_usage)
3997 /* Automatic variable that can go in a register. */
3998 int unsignedp = TREE_UNSIGNED (type);
3999 enum machine_mode reg_mode
4000 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
4002 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
4004 if (GET_CODE (DECL_RTL (decl)) == REG)
4005 REGNO_DECL (REGNO (DECL_RTL (decl))) = decl;
4006 else if (GET_CODE (DECL_RTL (decl)) == CONCAT)
4008 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 0))) = decl;
4009 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 1))) = decl;
4012 mark_user_reg (DECL_RTL (decl));
4014 if (POINTER_TYPE_P (type))
4015 mark_reg_pointer (DECL_RTL (decl),
4016 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
4018 maybe_set_unchanging (DECL_RTL (decl), decl);
4020 /* If something wants our address, try to use ADDRESSOF. */
4021 if (TREE_ADDRESSABLE (decl))
4022 put_var_into_stack (decl);
4025 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
4026 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
4027 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
4028 STACK_CHECK_MAX_VAR_SIZE)))
4030 /* Variable of fixed size that goes on the stack. */
4031 rtx oldaddr = 0;
4032 rtx addr;
4033 rtx x;
4035 /* If we previously made RTL for this decl, it must be an array
4036 whose size was determined by the initializer.
4037 The old address was a register; set that register now
4038 to the proper address. */
4039 if (DECL_RTL_SET_P (decl))
4041 if (GET_CODE (DECL_RTL (decl)) != MEM
4042 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
4043 abort ();
4044 oldaddr = XEXP (DECL_RTL (decl), 0);
4047 /* Set alignment we actually gave this decl. */
4048 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
4049 : GET_MODE_BITSIZE (DECL_MODE (decl)));
4050 DECL_USER_ALIGN (decl) = 0;
4052 x = assign_temp (TREE_TYPE (decl), 1, 1, 1);
4053 set_mem_attributes (x, decl, 1);
4054 SET_DECL_RTL (decl, x);
4056 if (oldaddr)
4058 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
4059 if (addr != oldaddr)
4060 emit_move_insn (oldaddr, addr);
4063 else
4064 /* Dynamic-size object: must push space on the stack. */
4066 rtx address, size, x;
4068 /* Record the stack pointer on entry to the block, if we have
4069 not already done so. */
4070 do_pending_stack_adjust ();
4071 save_stack_pointer ();
4073 /* In function-at-a-time mode, variable_size doesn't expand this,
4074 so do it now. */
4075 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
4076 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
4077 const0_rtx, VOIDmode, 0);
4079 /* Compute the variable's size, in bytes. */
4080 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
4081 free_temp_slots ();
4083 /* Allocate space on the stack for the variable. Note that
4084 DECL_ALIGN says how the variable is to be aligned and we
4085 cannot use it to conclude anything about the alignment of
4086 the size. */
4087 address = allocate_dynamic_stack_space (size, NULL_RTX,
4088 TYPE_ALIGN (TREE_TYPE (decl)));
4090 /* Reference the variable indirect through that rtx. */
4091 x = gen_rtx_MEM (DECL_MODE (decl), address);
4092 set_mem_attributes (x, decl, 1);
4093 SET_DECL_RTL (decl, x);
4096 /* Indicate the alignment we actually gave this variable. */
4097 #ifdef STACK_BOUNDARY
4098 DECL_ALIGN (decl) = STACK_BOUNDARY;
4099 #else
4100 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
4101 #endif
4102 DECL_USER_ALIGN (decl) = 0;
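/* Illustrative summary of the branches above (assumed examples):
   "int i;" under -O, or "register int i;", can take the register branch;
   "int a[10];" takes the fixed-size stack branch via assign_temp; and
   "int a[n];" takes the dynamic branch, saving the stack pointer and
   calling allocate_dynamic_stack_space.  */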
4106 /* Emit code to perform the initialization of a declaration DECL. */
4108 void
4109 expand_decl_init (decl)
4110 tree decl;
4112 int was_used = TREE_USED (decl);
4114 /* If this is a CONST_DECL, we don't have to generate any code, but
4115 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
4116 to be set while in the obstack containing the constant. If we don't
4117 do this, we can lose if we have functions nested three deep and the middle
4118 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
4119 the innermost function is the first to expand that STRING_CST. */
4120 if (TREE_CODE (decl) == CONST_DECL)
4122 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
4123 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
4124 EXPAND_INITIALIZER);
4125 return;
4128 if (TREE_STATIC (decl))
4129 return;
4131 /* Compute and store the initial value now. */
4133 if (DECL_INITIAL (decl) == error_mark_node)
4135 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
4137 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
4138 || code == POINTER_TYPE || code == REFERENCE_TYPE)
4139 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4140 0, 0);
4141 emit_queue ();
4143 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4145 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
4146 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
4147 emit_queue ();
4150 /* Don't let the initialization count as "using" the variable. */
4151 TREE_USED (decl) = was_used;
4153 /* Free any temporaries we made while initializing the decl. */
4154 preserve_temp_slots (NULL_RTX);
4155 free_temp_slots ();
4158 /* CLEANUP is an expression to be executed at exit from this binding contour;
4159 for example, in C++, it might call the destructor for this variable.
4161 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4162 CLEANUP multiple times and still have the correct semantics. This
4163 happens in exception handling, and for gotos, returns, and breaks
4164 that leave the current scope.
4166 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4167 that is not associated with any particular variable. */
4170 expand_decl_cleanup (decl, cleanup)
4171 tree decl, cleanup;
4173 struct nesting *thisblock;
4175 /* Error if we are not in any block. */
4176 if (cfun == 0 || block_stack == 0)
4177 return 0;
4179 thisblock = block_stack;
4181 /* Record the cleanup if there is one. */
4183 if (cleanup != 0)
4185 tree t;
4186 rtx seq;
4187 tree *cleanups = &thisblock->data.block.cleanups;
4188 int cond_context = conditional_context ();
4190 if (cond_context)
4192 rtx flag = gen_reg_rtx (word_mode);
4193 rtx set_flag_0;
4194 tree cond;
4196 start_sequence ();
4197 emit_move_insn (flag, const0_rtx);
4198 set_flag_0 = get_insns ();
4199 end_sequence ();
4201 thisblock->data.block.last_unconditional_cleanup
4202 = emit_insns_after (set_flag_0,
4203 thisblock->data.block.last_unconditional_cleanup);
4205 emit_move_insn (flag, const1_rtx);
4207 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
4208 SET_DECL_RTL (cond, flag);
4210 /* Conditionalize the cleanup. */
4211 cleanup = build (COND_EXPR, void_type_node,
4212 truthvalue_conversion (cond),
4213 cleanup, integer_zero_node);
4214 cleanup = fold (cleanup);
4216 cleanups = thisblock->data.block.cleanup_ptr;
4219 cleanup = unsave_expr (cleanup);
4221 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4223 if (! cond_context)
4224 /* If this block has a cleanup, it belongs in stack_block_stack. */
4225 stack_block_stack = thisblock;
4227 if (cond_context)
4229 start_sequence ();
4232 if (! using_eh_for_cleanups_p)
4233 TREE_ADDRESSABLE (t) = 1;
4234 else
4235 expand_eh_region_start ();
4237 if (cond_context)
4239 seq = get_insns ();
4240 end_sequence ();
4241 if (seq)
4242 thisblock->data.block.last_unconditional_cleanup
4243 = emit_insns_after (seq,
4244 thisblock->data.block.last_unconditional_cleanup);
4246 else
4248 thisblock->data.block.last_unconditional_cleanup
4249 = get_last_insn ();
4250 /* When we insert instructions after the last unconditional cleanup,
4251 we don't adjust last_insn. That means that a later add_insn will
4252 clobber the instructions we've just added. The easiest way to
4253 fix this is to just insert another instruction here, so that the
4254 instructions inserted after the last unconditional cleanup are
4255 never the last instruction. */
4256 emit_note (NULL, NOTE_INSN_DELETED);
4257 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4260 return 1;
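/* For illustration (assumed examples): the C++ front end registers a
   local object's destructor call here, e.g. for "{ T t; ... }" the
   CLEANUP is a call to T::~T.  The conditional-context path above covers
   cleanups that are only sometimes needed, as for a temporary constructed
   in just one arm of "cond ? T ("a") : T ("b")".  */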
4263 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4264 DECL_ELTS is the list of elements that belong to DECL's type.
4265 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4267 void
4268 expand_anon_union_decl (decl, cleanup, decl_elts)
4269 tree decl, cleanup, decl_elts;
4271 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4272 rtx x;
4273 tree t;
4275 /* If any of the elements are addressable, so is the entire union. */
4276 for (t = decl_elts; t; t = TREE_CHAIN (t))
4277 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4279 TREE_ADDRESSABLE (decl) = 1;
4280 break;
4283 expand_decl (decl);
4284 expand_decl_cleanup (decl, cleanup);
4285 x = DECL_RTL (decl);
4287 /* Go through the elements, assigning RTL to each. */
4288 for (t = decl_elts; t; t = TREE_CHAIN (t))
4290 tree decl_elt = TREE_VALUE (t);
4291 tree cleanup_elt = TREE_PURPOSE (t);
4292 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4294 /* Propagate the union's alignment to the elements. */
4295 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4296 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4298 /* If the element has BLKmode and the union doesn't, the union is
4299 aligned such that the element doesn't need to have BLKmode, so
4300 change the element's mode to the appropriate one for its size. */
4301 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4302 DECL_MODE (decl_elt) = mode
4303 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4305 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4306 instead create a new MEM rtx with the proper mode. */
4307 if (GET_CODE (x) == MEM)
4309 if (mode == GET_MODE (x))
4310 SET_DECL_RTL (decl_elt, x);
4311 else
4312 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4314 else if (GET_CODE (x) == REG)
4316 if (mode == GET_MODE (x))
4317 SET_DECL_RTL (decl_elt, x);
4318 else
4319 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4321 else
4322 abort ();
4324 /* Record the cleanup if there is one. */
4326 if (cleanup != 0)
4327 thisblock->data.block.cleanups
4328 = tree_cons (decl_elt, cleanup_elt,
4329 thisblock->data.block.cleanups);
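/* E.g. (illustrative): for the anonymous union

       union { int i; float f; };

   DECL is the union variable and DECL_ELTS lists `i' and `f'; the loop
   above gives each element the union's own storage, re-expressed in the
   element's machine mode.  */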
4333 /* Expand a list of cleanups LIST.
4334 Elements may be expressions or may be nested lists.
4336 If DONT_DO is nonnull, then any list-element
4337 whose TREE_PURPOSE matches DONT_DO is omitted.
4338 This is sometimes used to avoid a cleanup associated with
4339 a value that is being returned out of the scope.
4341 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4342 goto and handle protection regions specially in that case.
4344 If REACHABLE, we emit code; otherwise we just inform the exception
4345 handling code about this finalization. */
4347 static void
4348 expand_cleanups (list, dont_do, in_fixup, reachable)
4349 tree list;
4350 tree dont_do;
4351 int in_fixup;
4352 int reachable;
4354 tree tail;
4355 for (tail = list; tail; tail = TREE_CHAIN (tail))
4356 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4358 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4359 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4360 else
4362 if (! in_fixup && using_eh_for_cleanups_p)
4363 expand_eh_region_end_cleanup (TREE_VALUE (tail));
4365 if (reachable)
4367 /* Cleanups may be run multiple times. For example,
4368 when exiting a binding contour, we expand the
4369 cleanups associated with that contour. When a goto
4370 within that binding contour has a target outside that
4371 contour, it will expand all cleanups from its scope to
4372 the target. Though the cleanups are expanded multiple
4373 times, the control paths are non-overlapping so the
4374 cleanups will not be executed twice. */
4376 /* We may need to protect from outer cleanups. */
4377 if (in_fixup && using_eh_for_cleanups_p)
4379 expand_eh_region_start ();
4381 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4383 expand_eh_region_end_fixup (TREE_VALUE (tail));
4385 else
4386 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4388 free_temp_slots ();
4394 /* Mark the context we are emitting RTL in as a conditional
4395 context, so that any cleanup actions we register with
4396 expand_decl_cleanup will be properly conditionalized when those
4397 cleanup actions are later performed. Must be called before any
4398 expression (tree) is expanded that is within a conditional context. */
4400 void
4401 start_cleanup_deferral ()
4403 /* block_stack can be NULL if we are inside the parameter list. It is
4404 OK to do nothing, because cleanups aren't possible here. */
4405 if (block_stack)
4406 ++block_stack->data.block.conditional_code;
4409 /* Mark the end of a conditional region of code. Because cleanup
4410 deferrals may be nested, we may still be in a conditional region
4411 after we end the currently deferred cleanups; only after we end all
4412 deferred cleanups are we back in unconditional code. */
4414 void
4415 end_cleanup_deferral ()
4417 /* block_stack can be NULL if we are inside the parameter list. It is
4418 OK to do nothing, because cleanups aren't possible here. */
4419 if (block_stack)
4420 --block_stack->data.block.conditional_code;
4423 /* Move all cleanups from the current block_stack
4424 to the containing block_stack, where they are assumed to
4425 have been created. If anything can cause a temporary to
4426 be created, but not expanded for more than one level of
4427 block_stacks, then this code will have to change. */
4429 void
4430 move_cleanups_up ()
4432 struct nesting *block = block_stack;
4433 struct nesting *outer = block->next;
4435 outer->data.block.cleanups
4436 = chainon (block->data.block.cleanups,
4437 outer->data.block.cleanups);
4438 block->data.block.cleanups = 0;
4441 tree
4442 last_cleanup_this_contour ()
4444 if (block_stack == 0)
4445 return 0;
4447 return block_stack->data.block.cleanups;
4450 /* Return 1 if there are any pending cleanups at this point.
4451 If THIS_CONTOUR is nonzero, check the current contour as well.
4452 Otherwise, look only at the contours that enclose this one. */
4455 any_pending_cleanups (this_contour)
4456 int this_contour;
4458 struct nesting *block;
4460 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4461 return 0;
4463 if (this_contour && block_stack->data.block.cleanups != NULL)
4464 return 1;
4465 if (block_stack->data.block.cleanups == 0
4466 && block_stack->data.block.outer_cleanups == 0)
4467 return 0;
4469 for (block = block_stack->next; block; block = block->next)
4470 if (block->data.block.cleanups != 0)
4471 return 1;
4473 return 0;
4476 /* Enter a case (Pascal) or switch (C) statement.
4477 Push a block onto case_stack and nesting_stack
4478 to accumulate the case-labels that are seen
4479 and to record the labels generated for the statement.
4481 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4482 Otherwise, this construct is transparent for `exit_something'.
4484 EXPR is the index-expression to be dispatched on.
4485 TYPE is its nominal type. We could simply convert EXPR to this type,
4486 but instead we take short cuts. */
4488 void
4489 expand_start_case (exit_flag, expr, type, printname)
4490 int exit_flag;
4491 tree expr;
4492 tree type;
4493 const char *printname;
4495 struct nesting *thiscase = ALLOC_NESTING ();
4497 /* Make an entry on case_stack for the case we are entering. */
4499 thiscase->next = case_stack;
4500 thiscase->all = nesting_stack;
4501 thiscase->depth = ++nesting_depth;
4502 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4503 thiscase->data.case_stmt.case_list = 0;
4504 thiscase->data.case_stmt.index_expr = expr;
4505 thiscase->data.case_stmt.nominal_type = type;
4506 thiscase->data.case_stmt.default_label = 0;
4507 thiscase->data.case_stmt.printname = printname;
4508 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4509 case_stack = thiscase;
4510 nesting_stack = thiscase;
4512 do_pending_stack_adjust ();
4514 /* Make sure case_stmt.start points to something that won't
4515 need any transformation before expand_end_case. */
4516 if (GET_CODE (get_last_insn ()) != NOTE)
4517 emit_note (NULL, NOTE_INSN_DELETED);
4519 thiscase->data.case_stmt.start = get_last_insn ();
4521 start_cleanup_deferral ();
4524 /* Start a "dummy case statement" within which case labels are invalid
4525 and are not connected to any larger real case statement.
4526 This can be used if you don't want to let a case statement jump
4527 into the middle of certain kinds of constructs. */
4529 void
4530 expand_start_case_dummy ()
4532 struct nesting *thiscase = ALLOC_NESTING ();
4534 /* Make an entry on case_stack for the dummy. */
4536 thiscase->next = case_stack;
4537 thiscase->all = nesting_stack;
4538 thiscase->depth = ++nesting_depth;
4539 thiscase->exit_label = 0;
4540 thiscase->data.case_stmt.case_list = 0;
4541 thiscase->data.case_stmt.start = 0;
4542 thiscase->data.case_stmt.nominal_type = 0;
4543 thiscase->data.case_stmt.default_label = 0;
4544 case_stack = thiscase;
4545 nesting_stack = thiscase;
4546 start_cleanup_deferral ();
4549 /* End a dummy case statement. */
4551 void
4552 expand_end_case_dummy ()
4554 end_cleanup_deferral ();
4555 POPSTACK (case_stack);
4558 /* Return the data type of the index-expression
4559 of the innermost case statement, or null if none. */
4561 tree
4562 case_index_expr_type ()
4564 if (case_stack)
4565 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4566 return 0;
4569 static void
4570 check_seenlabel ()
4572 /* If this is the first label, warn if any insns have been emitted. */
4573 if (case_stack->data.case_stmt.line_number_status >= 0)
4575 rtx insn;
4577 restore_line_number_status
4578 (case_stack->data.case_stmt.line_number_status);
4579 case_stack->data.case_stmt.line_number_status = -1;
4581 for (insn = case_stack->data.case_stmt.start;
4582 insn;
4583 insn = NEXT_INSN (insn))
4585 if (GET_CODE (insn) == CODE_LABEL)
4586 break;
4587 if (GET_CODE (insn) != NOTE
4588 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4591 insn = PREV_INSN (insn);
4592 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4594 /* If insn is zero, then there must have been a syntax error. */
4595 if (insn)
4596 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4597 NOTE_LINE_NUMBER (insn),
4598 "unreachable code at beginning of %s",
4599 case_stack->data.case_stmt.printname);
4600 break;
4606 /* Accumulate one case or default label inside a case or switch statement.
4607 VALUE is the value of the case (a null pointer, for a default label).
4608 The function CONVERTER, when applied to arguments T and V,
4609 converts the value V to the type T.
4611 If not currently inside a case or switch statement, return 1 and do
4612 nothing. The caller will print a language-specific error message.
4613 If VALUE is a duplicate or overlaps, return 2 and do nothing
4614 except store the (first) duplicate node in *DUPLICATE.
4615 If VALUE is out of range, return 3 and do nothing.
4616 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4617 Return 0 on success.
4619 Extended to handle range statements. */
4622 pushcase (value, converter, label, duplicate)
4623 tree value;
4624 tree (*converter) PARAMS ((tree, tree));
4625 tree label;
4626 tree *duplicate;
4628 tree index_type;
4629 tree nominal_type;
4631 /* Fail if not inside a real case statement. */
4632 if (! (case_stack && case_stack->data.case_stmt.start))
4633 return 1;
4635 if (stack_block_stack
4636 && stack_block_stack->depth > case_stack->depth)
4637 return 5;
4639 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4640 nominal_type = case_stack->data.case_stmt.nominal_type;
4642 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4643 if (index_type == error_mark_node)
4644 return 0;
4646 /* Convert VALUE to the type in which the comparisons are nominally done. */
4647 if (value != 0)
4648 value = (*converter) (nominal_type, value);
4650 check_seenlabel ();
4652 /* Fail if this value is out of range for the actual type of the index
4653 (which may be narrower than NOMINAL_TYPE). */
4654 if (value != 0
4655 && (TREE_CONSTANT_OVERFLOW (value)
4656 || ! int_fits_type_p (value, index_type)))
4657 return 3;
4659 return add_case_node (value, value, label, duplicate);
4662 /* Like pushcase but this case applies to all values between VALUE1 and
4663 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4664 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4665 starts at VALUE1 and ends at the highest value of the index type.
4666 If both are NULL, this case applies to all values.
4668 The return value is the same as that of pushcase but there is one
4669 additional error code: 4 means the specified range was empty. */
4672 pushcase_range (value1, value2, converter, label, duplicate)
4673 tree value1, value2;
4674 tree (*converter) PARAMS ((tree, tree));
4675 tree label;
4676 tree *duplicate;
4678 tree index_type;
4679 tree nominal_type;
4681 /* Fail if not inside a real case statement. */
4682 if (! (case_stack && case_stack->data.case_stmt.start))
4683 return 1;
4685 if (stack_block_stack
4686 && stack_block_stack->depth > case_stack->depth)
4687 return 5;
4689 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4690 nominal_type = case_stack->data.case_stmt.nominal_type;
4692 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4693 if (index_type == error_mark_node)
4694 return 0;
4696 check_seenlabel ();
4698 /* Convert VALUEs to the type in which the comparisons are nominally done
4699 and replace any unspecified value with the corresponding bound. */
4700 if (value1 == 0)
4701 value1 = TYPE_MIN_VALUE (index_type);
4702 if (value2 == 0)
4703 value2 = TYPE_MAX_VALUE (index_type);
4705 /* Fail if the range is empty. Do this before any conversion since
4706 we want to allow out-of-range empty ranges. */
4707 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4708 return 4;
4710 /* If the max was unbounded, use the max of the nominal_type we are
4711 converting to. Do this after the < check above to suppress false
4712 positives. */
4713 if (value2 == 0)
4714 value2 = TYPE_MAX_VALUE (nominal_type);
4716 value1 = (*converter) (nominal_type, value1);
4717 value2 = (*converter) (nominal_type, value2);
4719 /* Fail if these values are out of range. */
4720 if (TREE_CONSTANT_OVERFLOW (value1)
4721 || ! int_fits_type_p (value1, index_type))
4722 return 3;
4724 if (TREE_CONSTANT_OVERFLOW (value2)
4725 || ! int_fits_type_p (value2, index_type))
4726 return 3;
4728 return add_case_node (value1, value2, label, duplicate);
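/* For illustration (assumed examples): a plain "case 5:" arrives via
   pushcase with a single value, while the GNU C range extension
   "case 1 ... 9:" arrives here with VALUE1 == 1 and VALUE2 == 9.  An
   inverted range such as "case 9 ... 1:" yields the empty-range return
   code 4 above.  */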
4731 /* Do the actual insertion of a case label for pushcase and pushcase_range
4732 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4733 slowdown for large switch statements. */
4736 add_case_node (low, high, label, duplicate)
4737 tree low, high;
4738 tree label;
4739 tree *duplicate;
4741 struct case_node *p, **q, *r;
4743 /* If there's no HIGH value, then this is not a case range; it's
4744 just a simple case label. But that's just a degenerate case
4745 range. */
4746 if (!high)
4747 high = low;
4749 /* Handle default labels specially. */
4750 if (!high && !low)
4752 if (case_stack->data.case_stmt.default_label != 0)
4754 *duplicate = case_stack->data.case_stmt.default_label;
4755 return 2;
4757 case_stack->data.case_stmt.default_label = label;
4758 expand_label (label);
4759 return 0;
4762 q = &case_stack->data.case_stmt.case_list;
4763 p = *q;
4765 while ((r = *q))
4767 p = r;
4769 /* Keep going past elements distinctly greater than HIGH. */
4770 if (tree_int_cst_lt (high, p->low))
4771 q = &p->left;
4773 /* or distinctly less than LOW. */
4774 else if (tree_int_cst_lt (p->high, low))
4775 q = &p->right;
4777 else
4779 /* We have an overlap; this is an error. */
4780 *duplicate = p->code_label;
4781 return 2;
4785 /* Add this label to the chain, and succeed. */
4787 r = (struct case_node *) xmalloc (sizeof (struct case_node));
4788 r->low = low;
4790 /* If the bounds are equal, turn this into the one-value case. */
4791 if (tree_int_cst_equal (low, high))
4792 r->high = r->low;
4793 else
4794 r->high = high;
4796 r->code_label = label;
4797 expand_label (label);
4799 *q = r;
4800 r->parent = p;
4801 r->left = 0;
4802 r->right = 0;
4803 r->balance = 0;
4805 while (p)
4807 struct case_node *s;
4809 if (r == p->left)
4811 int b;
4813 if (! (b = p->balance))
4814 /* Growth propagation from left side. */
4815 p->balance = -1;
4816 else if (b < 0)
4818 if (r->balance < 0)
4820 /* R-Rotation */
4821 if ((p->left = s = r->right))
4822 s->parent = p;
4824 r->right = p;
4825 p->balance = 0;
4826 r->balance = 0;
4827 s = p->parent;
4828 p->parent = r;
4830 if ((r->parent = s))
4832 if (s->left == p)
4833 s->left = r;
4834 else
4835 s->right = r;
4837 else
4838 case_stack->data.case_stmt.case_list = r;
4840 else
4841 /* r->balance == +1 */
4843 /* LR-Rotation */
4845 int b2;
4846 struct case_node *t = r->right;
4848 if ((p->left = s = t->right))
4849 s->parent = p;
4851 t->right = p;
4852 if ((r->right = s = t->left))
4853 s->parent = r;
4855 t->left = r;
4856 b = t->balance;
4857 b2 = b < 0;
4858 p->balance = b2;
4859 b2 = -b2 - b;
4860 r->balance = b2;
4861 t->balance = 0;
4862 s = p->parent;
4863 p->parent = t;
4864 r->parent = t;
4866 if ((t->parent = s))
4868 if (s->left == p)
4869 s->left = t;
4870 else
4871 s->right = t;
4873 else
4874 case_stack->data.case_stmt.case_list = t;
4876 break;
4879 else
4881 /* p->balance == +1; growth of left side balances the node. */
4882 p->balance = 0;
4883 break;
4886 else
4887 /* r == p->right */
4889 int b;
4891 if (! (b = p->balance))
4892 /* Growth propagation from right side. */
4893 p->balance++;
4894 else if (b > 0)
4896 if (r->balance > 0)
4898 /* L-Rotation */
4900 if ((p->right = s = r->left))
4901 s->parent = p;
4903 r->left = p;
4904 p->balance = 0;
4905 r->balance = 0;
4906 s = p->parent;
4907 p->parent = r;
4908 if ((r->parent = s))
4910 if (s->left == p)
4911 s->left = r;
4912 else
4913 s->right = r;
4916 else
4917 case_stack->data.case_stmt.case_list = r;
4920 else
4921 /* r->balance == -1 */
4923 /* RL-Rotation */
4924 int b2;
4925 struct case_node *t = r->left;
4927 if ((p->right = s = t->left))
4928 s->parent = p;
4930 t->left = p;
4932 if ((r->left = s = t->right))
4933 s->parent = r;
4935 t->right = r;
4936 b = t->balance;
4937 b2 = b < 0;
4938 r->balance = b2;
4939 b2 = -b2 - b;
4940 p->balance = b2;
4941 t->balance = 0;
4942 s = p->parent;
4943 p->parent = t;
4944 r->parent = t;
4946 if ((t->parent = s))
4948 if (s->left == p)
4949 s->left = t;
4950 else
4951 s->right = t;
4954 else
4955 case_stack->data.case_stmt.case_list = t;
4957 break;
4959 else
4961 /* p->balance == -1; growth of right side balances the node. */
4962 p->balance = 0;
4963 break;
4967 r = p;
4968 p = p->parent;
4971 return 0;
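/* Illustrative sketch of the single rotations above, in the usual AVL
   notation (an assumed diagram, not from the sources):

        R-rotation                        L-rotation

            p              r                 p              r
           / \            / \               / \            / \
          r   C   ==>    A   p             A   r    ==>   p   C
         / \                / \               / \        / \
        A   B              B   C             B   C      A   B

   The double rotations (LR and RL) first rotate the child and then the
   node itself, with the balance bookkeeping done inline above.  */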
4974 /* Returns the number of possible values of TYPE.
4975 Returns -1 if the number is unknown, variable, or if the number does not
4976 fit in a HOST_WIDE_INT.
4977 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4978 do not increase monotonically (there may be duplicates);
4979 to 1 if the values increase monotonically, but not always by 1;
4980 otherwise sets it to 0. */
4982 HOST_WIDE_INT
4983 all_cases_count (type, sparseness)
4984 tree type;
4985 int *sparseness;
4987 tree t;
4988 HOST_WIDE_INT count, minval, lastval;
4990 *sparseness = 0;
4992 switch (TREE_CODE (type))
4994 case BOOLEAN_TYPE:
4995 count = 2;
4996 break;
4998 case CHAR_TYPE:
4999 count = 1 << BITS_PER_UNIT;
5000 break;
5002 default:
5003 case INTEGER_TYPE:
5004 if (TYPE_MAX_VALUE (type) != 0
5005 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
5006 TYPE_MIN_VALUE (type))))
5007 && 0 != (t = fold (build (PLUS_EXPR, type, t,
5008 convert (type, integer_zero_node))))
5009 && host_integerp (t, 1))
5010 count = tree_low_cst (t, 1);
5011 else
5012 return -1;
5013 break;
5015 case ENUMERAL_TYPE:
5016 /* Don't waste time with enumeral types with huge values. */
5017 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
5018 || TYPE_MAX_VALUE (type) == 0
5019 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
5020 return -1;
5022 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
5023 count = 0;
5025 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
5027 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
5029 if (*sparseness == 2 || thisval < lastval)
5030 *sparseness = 2;
5031 else if (thisval != minval + count)
5032 *sparseness = 1;
5034 count++;
5038 return count;
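/* E.g. (illustrative): for "enum e { A = 1, B = 2, C = 4 };" this
   returns 3 and sets *SPARSENESS to 1, since the values increase
   monotonically but not always by 1.  */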
5041 #define BITARRAY_TEST(ARRAY, INDEX) \
5042 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5043 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
5044 #define BITARRAY_SET(ARRAY, INDEX) \
5045 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5046 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
5048 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
5049 with the case values we have seen, assuming the case expression
5050 has the given TYPE.
5051 SPARSENESS is as determined by all_cases_count.
5053 The time needed is proportional to COUNT, unless
5054 SPARSENESS is 2, in which case quadratic time is needed. */
5056 void
5057 mark_seen_cases (type, cases_seen, count, sparseness)
5058 tree type;
5059 unsigned char *cases_seen;
5060 HOST_WIDE_INT count;
5061 int sparseness;
5063 tree next_node_to_try = NULL_TREE;
5064 HOST_WIDE_INT next_node_offset = 0;
5066 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
5067 tree val = make_node (INTEGER_CST);
5069 TREE_TYPE (val) = type;
5070 if (! root)
5071 /* Do nothing. */
5073 else if (sparseness == 2)
5075 tree t;
5076 unsigned HOST_WIDE_INT xlo;
5078 /* This less efficient loop is only needed to handle
5079 duplicate case values (multiple enum constants
5080 with the same value). */
5081 TREE_TYPE (val) = TREE_TYPE (root->low);
5082 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5083 t = TREE_CHAIN (t), xlo++)
5085 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
5086 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
5087 n = root;
5090 /* Keep going past elements distinctly greater than VAL. */
5091 if (tree_int_cst_lt (val, n->low))
5092 n = n->left;
5094 /* or distinctly less than VAL. */
5095 else if (tree_int_cst_lt (n->high, val))
5096 n = n->right;
5098 else
5100 /* We have found a matching range. */
5101 BITARRAY_SET (cases_seen, xlo);
5102 break;
5105 while (n);
5108 else
5110 if (root->left)
5111 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5113 for (n = root; n; n = n->right)
5115 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5116 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5117 while (! tree_int_cst_lt (n->high, val))
5119 /* Calculate (into xlo) the "offset" of the integer (val).
5120 The element with lowest value has offset 0, the next smallest
5121 element has offset 1, etc. */
5123 unsigned HOST_WIDE_INT xlo;
5124 HOST_WIDE_INT xhi;
5125 tree t;
5127 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5129 /* The TYPE_VALUES will be in increasing order, so
5130 start searching where we last ended. */
5131 t = next_node_to_try;
5132 xlo = next_node_offset;
5133 xhi = 0;
5134 for (;;)
5136 if (t == NULL_TREE)
5138 t = TYPE_VALUES (type);
5139 xlo = 0;
5141 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5143 next_node_to_try = TREE_CHAIN (t);
5144 next_node_offset = xlo + 1;
5145 break;
5147 xlo++;
5148 t = TREE_CHAIN (t);
5149 if (t == next_node_to_try)
5151 xlo = -1;
5152 break;
5156 else
5158 t = TYPE_MIN_VALUE (type);
5159 if (t)
5160 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5161 &xlo, &xhi);
5162 else
5163 xlo = xhi = 0;
5164 add_double (xlo, xhi,
5165 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5166 &xlo, &xhi);
5169 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5170 BITARRAY_SET (cases_seen, xlo);
5172 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5173 1, 0,
5174 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5180 /* Called when the index of a switch statement is an enumerated type
5181 and there is no default label.
5183 Checks that all enumeration literals are covered by the case
5184 expressions of a switch. Also, warn if there are any extra
5185 switch cases that are *not* elements of the enumerated type.
5187 If all enumeration literals were covered by the case expressions,
5188 turn one of the expressions into the default expression since it should
5189 not be possible to fall through such a switch. */
5191 void
5192 check_for_full_enumeration_handling (type)
5193 tree type;
5195 struct case_node *n;
5196 tree chain;
5198 /* True iff the selector type is a numbered set mode. */
5199 int sparseness = 0;
5201 /* The number of possible selector values. */
5202 HOST_WIDE_INT size;
5204 /* For each possible selector value, a one iff it has been matched
5205 by a case value alternative. */
5206 unsigned char *cases_seen;
5208 /* The allocated size of cases_seen, in chars. */
5209 HOST_WIDE_INT bytes_needed;
5211 if (! warn_switch)
5212 return;
5214 size = all_cases_count (type, &sparseness);
5215 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5217 if (size > 0 && size < 600000
5218 /* We deliberately use calloc here, not cmalloc, so that we can suppress
5219 this optimization if we don't have enough memory rather than
5220 aborting, as xmalloc would do. */
5221 && (cases_seen =
5222 (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
5224 HOST_WIDE_INT i;
5225 tree v = TYPE_VALUES (type);
5227 /* The time complexity of this code is normally O(N), where
5228 N is the number of members in the enumerated type.
5229 However, if type is an ENUMERAL_TYPE whose values do not
5230 increase monotonically, O(N*log(N)) time may be needed.
5232 mark_seen_cases (type, cases_seen, size, sparseness);
5234 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5235 if (BITARRAY_TEST (cases_seen, i) == 0)
5236 warning ("enumeration value `%s' not handled in switch",
5237 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5239 free (cases_seen);
5242 /* Now we go the other way around; we warn if there are case
5243 expressions that don't correspond to enumerators. This can
5244 occur since C and C++ don't enforce type-checking of
5245 assignments to enumeration variables. */
5247 if (case_stack->data.case_stmt.case_list
5248 && case_stack->data.case_stmt.case_list->left)
5249 case_stack->data.case_stmt.case_list
5250 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5251 if (warn_switch)
5252 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5254 for (chain = TYPE_VALUES (type);
5255 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5256 chain = TREE_CHAIN (chain))
5259 if (!chain)
5261 if (TYPE_NAME (type) == 0)
5262 warning ("case value `%ld' not in enumerated type",
5263 (long) TREE_INT_CST_LOW (n->low));
5264 else
5265 warning ("case value `%ld' not in enumerated type `%s'",
5266 (long) TREE_INT_CST_LOW (n->low),
5267 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5268 == IDENTIFIER_NODE)
5269 ? TYPE_NAME (type)
5270 : DECL_NAME (TYPE_NAME (type))));
5272 if (!tree_int_cst_equal (n->low, n->high))
5274 for (chain = TYPE_VALUES (type);
5275 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5276 chain = TREE_CHAIN (chain))
5279 if (!chain)
5281 if (TYPE_NAME (type) == 0)
5282 warning ("case value `%ld' not in enumerated type",
5283 (long) TREE_INT_CST_LOW (n->high));
5284 else
5285 warning ("case value `%ld' not in enumerated type `%s'",
5286 (long) TREE_INT_CST_LOW (n->high),
5287 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5288 == IDENTIFIER_NODE)
5289 ? TYPE_NAME (type)
5290 : DECL_NAME (TYPE_NAME (type))));
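/* For illustration (assumed examples): with -Wswitch,

       enum e { A, B, C } v;
       switch (v) { case A: case B: break; }

   draws "enumeration value `C' not handled in switch" from the first
   loop above, while adding "case 42:" to the same switch draws the
   "case value ... not in enumerated type" warning from the second.  */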
5296 /* Free CN, and its children. */
5298 static void
5299 free_case_nodes (cn)
5300 case_node_ptr cn;
5302 if (cn)
5304 free_case_nodes (cn->left);
5305 free_case_nodes (cn->right);
5306 free (cn);
5312 /* Terminate a case (Pascal) or switch (C) statement
5313 in which ORIG_INDEX is the expression to be tested.
5314 Generate the code to test it and jump to the right place. */
5316 void
5317 expand_end_case (orig_index)
5318 tree orig_index;
5320 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5321 rtx default_label = 0;
5322 struct case_node *n;
5323 unsigned int count;
5324 rtx index;
5325 rtx table_label;
5326 int ncases;
5327 rtx *labelvec;
5328 int i;
5329 rtx before_case, end;
5330 struct nesting *thiscase = case_stack;
5331 tree index_expr, index_type;
5332 int unsignedp;
5334 /* Don't crash due to previous errors. */
5335 if (thiscase == NULL)
5336 return;
5338 table_label = gen_label_rtx ();
5339 index_expr = thiscase->data.case_stmt.index_expr;
5340 index_type = TREE_TYPE (index_expr);
5341 unsignedp = TREE_UNSIGNED (index_type);
5343 do_pending_stack_adjust ();
5345 /* This might get a spurious warning in the presence of a syntax error;
5346 it could be fixed by moving the call to check_seenlabel after the
5347 check for error_mark_node, and copying the code of check_seenlabel that
5348 deals with case_stack->data.case_stmt.line_number_status /
5349 restore_line_number_status in front of the call to end_cleanup_deferral;
5350 however, this might miss some useful warnings in the presence of
5351 non-syntax errors. */
5352 check_seenlabel ();
5354 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5355 if (index_type != error_mark_node)
5357 /* If switch expression was an enumerated type, check that all
5358 enumeration literals are covered by the cases.
5359 No sense trying this if there's a default case, however. */
5361 if (!thiscase->data.case_stmt.default_label
5362 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5363 && TREE_CODE (index_expr) != INTEGER_CST)
5364 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5366 /* If we don't have a default-label, create one here,
5367 after the body of the switch. */
5368 if (thiscase->data.case_stmt.default_label == 0)
5370 thiscase->data.case_stmt.default_label
5371 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5372 expand_label (thiscase->data.case_stmt.default_label);
5374 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5376 before_case = get_last_insn ();
5378 if (thiscase->data.case_stmt.case_list
5379 && thiscase->data.case_stmt.case_list->left)
5380 thiscase->data.case_stmt.case_list
5381 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5383 /* Simplify the case-list before we count it. */
5384 group_case_nodes (thiscase->data.case_stmt.case_list);
5386 /* Get upper and lower bounds of case values.
5387 Also convert all the case values to the index expr's data type. */
5389 count = 0;
5390 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5392 /* Check low and high label values are integers. */
5393 if (TREE_CODE (n->low) != INTEGER_CST)
5394 abort ();
5395 if (TREE_CODE (n->high) != INTEGER_CST)
5396 abort ();
5398 n->low = convert (index_type, n->low);
5399 n->high = convert (index_type, n->high);
5401 /* Count the elements and track the largest and smallest
5402 of them (treating them as signed even if they are not). */
5403 if (count++ == 0)
5405 minval = n->low;
5406 maxval = n->high;
5408 else
5410 if (INT_CST_LT (n->low, minval))
5411 minval = n->low;
5412 if (INT_CST_LT (maxval, n->high))
5413 maxval = n->high;
5415 /* A range counts double, since it requires two compares. */
5416 if (! tree_int_cst_equal (n->low, n->high))
5417 count++;
5420 /* Compute span of values. */
5421 if (count != 0)
5422 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5424 end_cleanup_deferral ();
5426 if (count == 0)
5428 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5429 emit_queue ();
5430 emit_jump (default_label);
5433 /* If range of values is much bigger than number of values,
5434 make a sequence of conditional branches instead of a dispatch.
5435 If the switch-index is a constant, do it this way
5436 because we can optimize it. */
5438 else if (count < case_values_threshold ()
5439 || compare_tree_int (range, 10 * count) > 0
5440 /* RANGE may be signed, and really large ranges will show up
5441 as negative numbers. */
5442 || compare_tree_int (range, 0) < 0
5443 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5444 || flag_pic
5445 #endif
5446 || TREE_CODE (index_expr) == INTEGER_CST
5447 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5448 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5450 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5452 /* If the index is a short or char for which we do not have
5453 an insn to handle comparisons directly, convert it to
5454 a full integer now, rather than letting each comparison
5455 generate the conversion. */
5457 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5458 && ! have_insn_for (COMPARE, GET_MODE (index)))
5460 enum machine_mode wider_mode;
5461 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5462 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5463 if (have_insn_for (COMPARE, wider_mode))
5465 index = convert_to_mode (wider_mode, index, unsignedp);
5466 break;
5470 emit_queue ();
5471 do_pending_stack_adjust ();
5473 index = protect_from_queue (index, 0);
5474 if (GET_CODE (index) == MEM)
5475 index = copy_to_reg (index);
5476 if (GET_CODE (index) == CONST_INT
5477 || TREE_CODE (index_expr) == INTEGER_CST)
5479 /* Make a tree node with the proper constant value
5480 if we don't already have one. */
5481 if (TREE_CODE (index_expr) != INTEGER_CST)
5483 index_expr
5484 = build_int_2 (INTVAL (index),
5485 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5486 index_expr = convert (index_type, index_expr);
5489 /* For constant index expressions we need only
5490 issue an unconditional branch to the appropriate
5491 target code. The job of removing any unreachable
5492 code is left to the optimization phase if the
5493 "-O" option is specified. */
5494 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5495 if (! tree_int_cst_lt (index_expr, n->low)
5496 && ! tree_int_cst_lt (n->high, index_expr))
5497 break;
5499 if (n)
5500 emit_jump (label_rtx (n->code_label));
5501 else
5502 emit_jump (default_label);
5504 else
5506 /* If the index expression is not constant we generate
5507 a binary decision tree to select the appropriate
5508 target code. This is done as follows:
5510 The list of cases is rearranged into a binary tree,
5511 nearly optimal assuming equal probability for each case.
5513 The tree is transformed into RTL, eliminating
5514 redundant test conditions at the same time.
5516 If program flow could reach the end of the
5517 decision tree an unconditional jump to the
5518 default code is emitted. */
5520 use_cost_table
5521 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5522 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5523 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5524 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5525 default_label, index_type);
5526 emit_jump_if_reachable (default_label);
5529 else
5531 if (! try_casesi (index_type, index_expr, minval, range,
5532 table_label, default_label))
5534 index_type = thiscase->data.case_stmt.nominal_type;
5536 /* Index jumptables from zero for suitable values of
5537 minval to avoid a subtraction. */
5538 if (! optimize_size
5539 && compare_tree_int (minval, 0) > 0
5540 && compare_tree_int (minval, 3) < 0)
5542 minval = integer_zero_node;
5543 range = maxval;
5546 if (! try_tablejump (index_type, index_expr, minval, range,
5547 table_label, default_label))
5548 abort ();
5551 /* Get table of labels to jump to, in order of case index. */
5553 ncases = tree_low_cst (range, 0) + 1;
5554 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5555 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5557 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5559 HOST_WIDE_INT i
5560 = tree_low_cst (n->low, 0) - tree_low_cst (minval, 0);
5562 while (1)
5564 labelvec[i]
5565 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5566 if (i + tree_low_cst (minval, 0)
5567 == tree_low_cst (n->high, 0))
5568 break;
5569 i++;
5573 /* Fill in the gaps with the default. */
5574 for (i = 0; i < ncases; i++)
5575 if (labelvec[i] == 0)
5576 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5578 /* Output the table. */
5579 emit_label (table_label);
5581 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5582 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5583 gen_rtx_LABEL_REF (Pmode, table_label),
5584 gen_rtvec_v (ncases, labelvec),
5585 const0_rtx, const0_rtx));
5586 else
5587 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5588 gen_rtvec_v (ncases, labelvec)));
5590 /* If the case insn drops through the table,
5591 after the table we must jump to the default-label.
5592 Otherwise record no drop-through after the table. */
5593 #ifdef CASE_DROPS_THROUGH
5594 emit_jump (default_label);
5595 #else
5596 emit_barrier ();
5597 #endif
5600 before_case = NEXT_INSN (before_case);
5601 end = get_last_insn ();
5602 squeeze_notes (&before_case, &end);
5603 reorder_insns (before_case, end,
5604 thiscase->data.case_stmt.start);
5606 else
5607 end_cleanup_deferral ();
5609 if (thiscase->exit_label)
5610 emit_label (thiscase->exit_label);
5612 free_case_nodes (case_stack->data.case_stmt.case_list);
5613 POPSTACK (case_stack);
5615 free_temp_slots ();
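/* Illustrative summary (assumed numbers): a dense switch such as
   "case 1:" through "case 8:" passes the tests above and is emitted as a
   casesi or tablejump dispatch, while cases 1, 100 and 10000 fail the
   "range no more than 10 * count" test and are expanded as a balanced
   tree of compares via balance_case_nodes and emit_case_nodes.  */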
5618 /* Convert the tree NODE into a list linked by the right field, with the left
5619 field zeroed. RIGHT is used for recursion; it is a list to be placed
5620 rightmost in the resulting list. */
5622 static struct case_node *
5623 case_tree2list (node, right)
5624 struct case_node *node, *right;
5626 struct case_node *left;
5628 if (node->right)
5629 right = case_tree2list (node->right, right);
5631 node->right = right;
5632 if ((left = node->left))
5634 node->left = 0;
5635 return case_tree2list (left, node);
5638 return node;
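/* An illustration of the flattening (assumed shape): the balanced tree

        4
       / \
      2   6

   passed in as case_tree2list (root, 0) yields the ascending list
   2 -> 4 -> 6, linked through the RIGHT fields with every LEFT field
   zeroed.  */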
5641 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5643 static void
5644 do_jump_if_equal (op1, op2, label, unsignedp)
5645 rtx op1, op2, label;
5646 int unsignedp;
5648 if (GET_CODE (op1) == CONST_INT
5649 && GET_CODE (op2) == CONST_INT)
5651 if (INTVAL (op1) == INTVAL (op2))
5652 emit_jump (label);
5654 else
5656 enum machine_mode mode = GET_MODE (op1);
5657 if (mode == VOIDmode)
5658 mode = GET_MODE (op2);
5659 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
5660 0, label);
5664 /* Not all case values are encountered equally. This function
5665 uses a heuristic to weight case labels, in cases where that
5666 looks like a reasonable thing to do.
5668 Right now, all we try to guess is text, and we establish the
5669 following weights:
5671 chars above space: 16
5672 digits: 16
5673 default: 12
5674 space, punct: 8
5675 tab: 4
5676 newline: 2
5677 other "\" chars: 1
5678 remaining chars: 0
5680 If we find any cases in the switch that are not either -1 or in the range
5681 of valid ASCII characters, or are control characters other than those
5682 commonly used with "\", don't treat this switch as scanning text.
5684 Return 1 if these nodes are suitable for cost estimation, otherwise
5685 return 0. */
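/* For example, under the weights above the labels
   case 'a': case '0': case ' ': case '\n': receive weights 16, 16, 8
   and 2 respectively, while a label such as case 200, lying outside
   -1 .. 127, makes this function return 0 so the cost table is not
   used at all.  */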
5687 static int
5688 estimate_case_costs (node)
5689 case_node_ptr node;
5691 tree min_ascii = integer_minus_one_node;
5692 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5693 case_node_ptr n;
5694 int i;
5696 /* If we haven't already made the cost table, make it now. Note that the
5697 lower bound of the table is -1, not zero. */
5699 if (! cost_table_initialized)
5701 cost_table_initialized = 1;
5703 for (i = 0; i < 128; i++)
5705 if (ISALNUM (i))
5706 COST_TABLE (i) = 16;
5707 else if (ISPUNCT (i))
5708 COST_TABLE (i) = 8;
5709 else if (ISCNTRL (i))
5710 COST_TABLE (i) = -1;
5713 COST_TABLE (' ') = 8;
5714 COST_TABLE ('\t') = 4;
5715 COST_TABLE ('\0') = 4;
5716 COST_TABLE ('\n') = 2;
5717 COST_TABLE ('\f') = 1;
5718 COST_TABLE ('\v') = 1;
5719 COST_TABLE ('\b') = 1;
5722 /* See if all the case expressions look like text. It is text if the
5723 lowest constant is >= -1 and the highest constant is <= 127. Do all comparisons
5724 as signed arithmetic, since we never want to access cost_table with a
5725 value less than -1. Also check that none of the constants in a range
5726 are strange control characters. */
5728 for (n = node; n; n = n->right)
5730 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5731 return 0;
5733 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5734 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5735 if (COST_TABLE (i) < 0)
5736 return 0;
5739 /* All interesting values are within the range of interesting
5740 ASCII characters. */
5741 return 1;
5744 /* Scan an ordered list of case nodes
5745 combining those with consecutive values or ranges.
5747 E.g., three separate entries 1: 2: 3: become one entry 1..3: */
5749 static void
5750 group_case_nodes (head)
5751 case_node_ptr head;
5753 case_node_ptr node = head;
5755 while (node)
5757 rtx lb = next_real_insn (label_rtx (node->code_label));
5758 rtx lb2;
5759 case_node_ptr np = node;
5761 /* Try to group the successors of NODE with NODE. */
5762 while (((np = np->right) != 0)
5763 /* Do they jump to the same place? */
5764 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5765 || (lb != 0 && lb2 != 0
5766 && simplejump_p (lb)
5767 && simplejump_p (lb2)
5768 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5769 SET_SRC (PATTERN (lb2)))))
5770 /* Are their ranges consecutive? */
5771 && tree_int_cst_equal (np->low,
5772 fold (build (PLUS_EXPR,
5773 TREE_TYPE (node->high),
5774 node->high,
5775 integer_one_node)))
5776 /* An overflow is not consecutive. */
5777 && tree_int_cst_lt (node->high,
5778 fold (build (PLUS_EXPR,
5779 TREE_TYPE (node->high),
5780 node->high,
5781 integer_one_node))))
5783 node->high = np->high;
5785 /* NP is the first node after NODE which can't be grouped with it.
5786 Delete the nodes in between, and move on to that node. */
5787 node->right = np;
5788 node = np;
5792 /* Take an ordered list of case nodes
5793 and transform them into a near optimal binary tree,
5794 on the assumption that any target code selection value is as
5795 likely as any other.
5797 The transformation is performed by splitting the ordered
5798 list into two equal sections plus a pivot. The parts are
5799 then attached to the pivot as left and right branches. Each
5800 branch is then transformed recursively. */
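/* A sketch of the transformation: the ordered list 1 -> 3 -> 5 -> 7 -> 9
   is split around the pivot 5, giving

        5
       / \
      1   7
       \   \
        3   9

   where the two-node halves are left as single levels; longer halves
   would be balanced recursively in the same way.  */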
5802 static void
5803 balance_case_nodes (head, parent)
5804 case_node_ptr *head;
5805 case_node_ptr parent;
5807 case_node_ptr np;
5809 np = *head;
5810 if (np)
5812 int cost = 0;
5813 int i = 0;
5814 int ranges = 0;
5815 case_node_ptr *npp;
5816 case_node_ptr left;
5818 /* Count the number of entries on branch. Also count the ranges. */
5820 while (np)
5822 if (!tree_int_cst_equal (np->low, np->high))
5824 ranges++;
5825 if (use_cost_table)
5826 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5829 if (use_cost_table)
5830 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5832 i++;
5833 np = np->right;
5836 if (i > 2)
5838 /* Split this list if it is long enough for that to help. */
5839 npp = head;
5840 left = *npp;
5841 if (use_cost_table)
5843 /* Find the place in the list that bisects the list's total cost.
5844 Here I gets half the total cost. */
5845 int n_moved = 0;
5846 i = (cost + 1) / 2;
5847 while (1)
5849 /* Skip nodes while their cost does not reach that amount. */
5850 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5851 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5852 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5853 if (i <= 0)
5854 break;
5855 npp = &(*npp)->right;
5856 n_moved += 1;
5858 if (n_moved == 0)
5860 /* Leave this branch lopsided, but optimize left-hand
5861 side and fill in `parent' fields for right-hand side. */
5862 np = *head;
5863 np->parent = parent;
5864 balance_case_nodes (&np->left, np);
5865 for (; np->right; np = np->right)
5866 np->right->parent = np;
5867 return;
5870 /* If there are just three nodes, split at the middle one. */
5871 else if (i == 3)
5872 npp = &(*npp)->right;
5873 else
5875 /* Find the place in the list that bisects the list's total cost,
5876 where ranges count as 2.
5877 Here I gets half the total cost. */
5878 i = (i + ranges + 1) / 2;
5879 while (1)
5881 /* Skip nodes while their cost does not reach that amount. */
5882 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5883 i--;
5884 i--;
5885 if (i <= 0)
5886 break;
5887 npp = &(*npp)->right;
5890 *head = np = *npp;
5891 *npp = 0;
5892 np->parent = parent;
5893 np->left = left;
5895 /* Optimize each of the two split parts. */
5896 balance_case_nodes (&np->left, np);
5897 balance_case_nodes (&np->right, np);
5899 else
5901 /* Else leave this branch as one level,
5902 but fill in `parent' fields. */
5903 np = *head;
5904 np->parent = parent;
5905 for (; np->right; np = np->right)
5906 np->right->parent = np;
5911 /* Search the parent sections of the case node tree
5912 to see if a test for the lower bound of NODE would be redundant.
5913 INDEX_TYPE is the type of the index expression.
5915 The instructions to generate the case decision tree are
5916 output in the same order as nodes are processed, so it is
5917 known that if a parent node tests the current node's low
5918 value minus one, the current node is bounded at its lower
5919 span. Thus the test would be redundant. */
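/* A hypothetical illustration: if an ancestor node has HIGH == 49 and
   control reached us on its greater-than side, then a node with
   LOW == 50 finds 50 - 1 among its parents' HIGH values and the
   lower-bound test is skipped.  */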
5921 static int
5922 node_has_low_bound (node, index_type)
5923 case_node_ptr node;
5924 tree index_type;
5926 tree low_minus_one;
5927 case_node_ptr pnode;
5929 /* If the lower bound of this node is the lowest value in the index type,
5930 we need not test it. */
5932 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5933 return 1;
5935 /* If this node has a left branch, the value at the left must be less
5936 than that at this node, so it cannot be bounded at the bottom and
5937 we need not bother testing any further. */
5939 if (node->left)
5940 return 0;
5942 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5943 node->low, integer_one_node));
5945 /* If the subtraction above overflowed, we can't verify anything.
5946 Otherwise, look for a parent that tests our value - 1. */
5948 if (! tree_int_cst_lt (low_minus_one, node->low))
5949 return 0;
5951 for (pnode = node->parent; pnode; pnode = pnode->parent)
5952 if (tree_int_cst_equal (low_minus_one, pnode->high))
5953 return 1;
5955 return 0;
5958 /* Search the parent sections of the case node tree
5959 to see if a test for the upper bound of NODE would be redundant.
5960 INDEX_TYPE is the type of the index expression.
5962 The instructions to generate the case decision tree are
5963 output in the same order as nodes are processed, so it is
5964 known that if a parent node tests the current node's high
5965 value plus one, the current node is bounded at its upper
5966 span. Thus the test would be redundant. */
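/* Symmetrically (an illustration): a node with HIGH == 49 whose
   ancestor tested LOW == 50 is already known to lie below 50, so its
   upper-bound test is dropped.  */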
5968 static int
5969 node_has_high_bound (node, index_type)
5970 case_node_ptr node;
5971 tree index_type;
5973 tree high_plus_one;
5974 case_node_ptr pnode;
5976 /* If there is no upper bound, obviously no test is needed. */
5978 if (TYPE_MAX_VALUE (index_type) == NULL)
5979 return 1;
5981 /* If the upper bound of this node is the highest value in the type
5982 of the index expression, we need not test against it. */
5984 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5985 return 1;
5987 /* If this node has a right branch, the value at the right must be greater
5988 than that at this node, so it cannot be bounded at the top and
5989 we need not bother testing any further. */
5991 if (node->right)
5992 return 0;
5994 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5995 node->high, integer_one_node));
5997 /* If the addition above overflowed, we can't verify anything.
5998 Otherwise, look for a parent that tests our value + 1. */
6000 if (! tree_int_cst_lt (node->high, high_plus_one))
6001 return 0;
6003 for (pnode = node->parent; pnode; pnode = pnode->parent)
6004 if (tree_int_cst_equal (high_plus_one, pnode->low))
6005 return 1;
6007 return 0;
6010 /* Search the parent sections of the
6011 case node tree to see if both tests for the upper and lower
6012 bounds of NODE would be redundant. */
6014 static int
6015 node_is_bounded (node, index_type)
6016 case_node_ptr node;
6017 tree index_type;
6019 return (node_has_low_bound (node, index_type)
6020 && node_has_high_bound (node, index_type));
6023 /* Emit an unconditional jump to LABEL unless it would be dead code. */
6025 static void
6026 emit_jump_if_reachable (label)
6027 rtx label;
6029 if (GET_CODE (get_last_insn ()) != BARRIER)
6030 emit_jump (label);
6033 /* Emit step-by-step code to select a case for the value of INDEX.
6034 The decision tree thus generated follows the form of the
6035 case-node binary tree NODE, whose nodes represent test conditions.
6036 INDEX_TYPE is the type of the index of the switch.
6038 Care is taken to prune redundant tests from the decision tree
6039 by detecting any boundary conditions already checked by
6040 emitted rtx. (See node_has_high_bound, node_has_low_bound
6041 and node_is_bounded, above.)
6043 Where the test conditions can be shown to be redundant we emit
6044 an unconditional jump to the target code. As a further
6045 optimization, the subordinates of a tree node are examined to
6046 check for bounded nodes. In this case conditional and/or
6047 unconditional jumps as a result of the boundary check for the
6048 current node are arranged to target the subordinate's associated
6049 code for out-of-bound conditions on the current node.
6051 We can assume that when control reaches the code generated here,
6052 the index value has already been compared with the parents
6053 of this node, and determined to be on the same side of each parent
6054 as this node is. Thus, if this node tests for the value 51,
6055 and a parent tested for 52, we don't need to consider
6056 the possibility of a value greater than 51. If another parent
6057 tests for the value 50, then this node need not test anything. */
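/* As a sketch of the pruning (assumed labels): a leaf covering the
   range 10 .. 20 whose parents already rule out both sides reduces,
   via the node_is_bounded check below, to the single jump

     goto L10_20;

   with no comparison emitted at all.  */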
6059 static void
6060 emit_case_nodes (index, node, default_label, index_type)
6061 rtx index;
6062 case_node_ptr node;
6063 rtx default_label;
6064 tree index_type;
6066 /* If INDEX has an unsigned type, we must make unsigned branches. */
6067 int unsignedp = TREE_UNSIGNED (index_type);
6068 enum machine_mode mode = GET_MODE (index);
6069 enum machine_mode imode = TYPE_MODE (index_type);
6071 /* See if our parents have already tested everything for us.
6072 If they have, emit an unconditional jump for this node. */
6073 if (node_is_bounded (node, index_type))
6074 emit_jump (label_rtx (node->code_label));
6076 else if (tree_int_cst_equal (node->low, node->high))
6078 /* Node is single valued. First see if the index expression matches
6079 this node and then check our children, if any. */
6081 do_jump_if_equal (index,
6082 convert_modes (mode, imode,
6083 expand_expr (node->low, NULL_RTX,
6084 VOIDmode, 0),
6085 unsignedp),
6086 label_rtx (node->code_label), unsignedp);
6088 if (node->right != 0 && node->left != 0)
6090 /* This node has children on both sides.
6091 Dispatch to one side or the other
6092 by comparing the index value with this node's value.
6093 If one subtree is bounded, check that one first,
6094 so we can avoid real branches in the tree. */
6096 if (node_is_bounded (node->right, index_type))
6098 emit_cmp_and_jump_insns (index,
6099 convert_modes
6100 (mode, imode,
6101 expand_expr (node->high, NULL_RTX,
6102 VOIDmode, 0),
6103 unsignedp),
6104 GT, NULL_RTX, mode, unsignedp, 0,
6105 label_rtx (node->right->code_label));
6106 emit_case_nodes (index, node->left, default_label, index_type);
6109 else if (node_is_bounded (node->left, index_type))
6111 emit_cmp_and_jump_insns (index,
6112 convert_modes
6113 (mode, imode,
6114 expand_expr (node->high, NULL_RTX,
6115 VOIDmode, 0),
6116 unsignedp),
6117 LT, NULL_RTX, mode, unsignedp, 0,
6118 label_rtx (node->left->code_label));
6119 emit_case_nodes (index, node->right, default_label, index_type);
6122 else
6124 /* Neither child is bounded. First distinguish the two sides;
6125 then emit the code for one side at a time. */
6127 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6129 /* See if the value is on the right. */
6130 emit_cmp_and_jump_insns (index,
6131 convert_modes
6132 (mode, imode,
6133 expand_expr (node->high, NULL_RTX,
6134 VOIDmode, 0),
6135 unsignedp),
6136 GT, NULL_RTX, mode, unsignedp, 0,
6137 label_rtx (test_label));
6139 /* Value must be on the left.
6140 Handle the left-hand subtree. */
6141 emit_case_nodes (index, node->left, default_label, index_type);
6142 /* If left-hand subtree does nothing,
6143 go to default. */
6144 emit_jump_if_reachable (default_label);
6146 /* Code branches here for the right-hand subtree. */
6147 expand_label (test_label);
6148 emit_case_nodes (index, node->right, default_label, index_type);
6152 else if (node->right != 0 && node->left == 0)
6154 /* Here we have a right child but no left, so we issue a conditional
6155 branch to default and process the right child.
6157 Omit the conditional branch to default if it would guard only a
6158 single right child; it costs too much space to save so little time. */
6160 if (node->right->right || node->right->left
6161 || !tree_int_cst_equal (node->right->low, node->right->high))
6163 if (!node_has_low_bound (node, index_type))
6165 emit_cmp_and_jump_insns (index,
6166 convert_modes
6167 (mode, imode,
6168 expand_expr (node->high, NULL_RTX,
6169 VOIDmode, 0),
6170 unsignedp),
6171 LT, NULL_RTX, mode, unsignedp, 0,
6172 default_label);
6175 emit_case_nodes (index, node->right, default_label, index_type);
6177 else
6178 /* We cannot process node->right normally
6179 since we haven't ruled out the numbers less than
6180 this node's value. So handle node->right explicitly. */
6181 do_jump_if_equal (index,
6182 convert_modes
6183 (mode, imode,
6184 expand_expr (node->right->low, NULL_RTX,
6185 VOIDmode, 0),
6186 unsignedp),
6187 label_rtx (node->right->code_label), unsignedp);
6190 else if (node->right == 0 && node->left != 0)
6192 /* Just one subtree, on the left. */
6193 if (node->left->left || node->left->right
6194 || !tree_int_cst_equal (node->left->low, node->left->high))
6196 if (!node_has_high_bound (node, index_type))
6198 emit_cmp_and_jump_insns (index,
6199 convert_modes
6200 (mode, imode,
6201 expand_expr (node->high, NULL_RTX,
6202 VOIDmode, 0),
6203 unsignedp),
6204 GT, NULL_RTX, mode, unsignedp, 0,
6205 default_label);
6208 emit_case_nodes (index, node->left, default_label, index_type);
6210 else
6211 /* We cannot process node->left normally
6212 since we haven't ruled out the numbers greater than
6213 this node's value. So handle node->left explicitly. */
6214 do_jump_if_equal (index,
6215 convert_modes
6216 (mode, imode,
6217 expand_expr (node->left->low, NULL_RTX,
6218 VOIDmode, 0),
6219 unsignedp),
6220 label_rtx (node->left->code_label), unsignedp);
6223 else
6225 /* Node is a range. These cases are very similar to those for a single
6226 value, except that we do not start by testing whether this node
6227 is the one to branch to. */
6229 if (node->right != 0 && node->left != 0)
6231 /* Node has subtrees on both sides.
6232 If the right-hand subtree is bounded,
6233 test for it first, since we can go straight there.
6234 Otherwise, we need to make a branch in the control structure,
6235 then handle the two subtrees. */
6236 tree test_label = 0;
6238 if (node_is_bounded (node->right, index_type))
6239 /* Right hand node is fully bounded so we can eliminate any
6240 testing and branch directly to the target code. */
6241 emit_cmp_and_jump_insns (index,
6242 convert_modes
6243 (mode, imode,
6244 expand_expr (node->high, NULL_RTX,
6245 VOIDmode, 0),
6246 unsignedp),
6247 GT, NULL_RTX, mode, unsignedp, 0,
6248 label_rtx (node->right->code_label));
6249 else
6251 /* Right hand node requires testing.
6252 Branch to a label where we will handle it later. */
6254 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6255 emit_cmp_and_jump_insns (index,
6256 convert_modes
6257 (mode, imode,
6258 expand_expr (node->high, NULL_RTX,
6259 VOIDmode, 0),
6260 unsignedp),
6261 GT, NULL_RTX, mode, unsignedp, 0,
6262 label_rtx (test_label));
6265 /* Value belongs to this node or to the left-hand subtree. */
6267 emit_cmp_and_jump_insns (index,
6268 convert_modes
6269 (mode, imode,
6270 expand_expr (node->low, NULL_RTX,
6271 VOIDmode, 0),
6272 unsignedp),
6273 GE, NULL_RTX, mode, unsignedp, 0,
6274 label_rtx (node->code_label));
6276 /* Handle the left-hand subtree. */
6277 emit_case_nodes (index, node->left, default_label, index_type);
6279 /* If right node had to be handled later, do that now. */
6281 if (test_label)
6283 /* If the left-hand subtree fell through,
6284 don't let it fall into the right-hand subtree. */
6285 emit_jump_if_reachable (default_label);
6287 expand_label (test_label);
6288 emit_case_nodes (index, node->right, default_label, index_type);
6292 else if (node->right != 0 && node->left == 0)
6294 /* Deal with values to the left of this node,
6295 if they are possible. */
6296 if (!node_has_low_bound (node, index_type))
6298 emit_cmp_and_jump_insns (index,
6299 convert_modes
6300 (mode, imode,
6301 expand_expr (node->low, NULL_RTX,
6302 VOIDmode, 0),
6303 unsignedp),
6304 LT, NULL_RTX, mode, unsignedp, 0,
6305 default_label);
6308 /* Value belongs to this node or to the right-hand subtree. */
6310 emit_cmp_and_jump_insns (index,
6311 convert_modes
6312 (mode, imode,
6313 expand_expr (node->high, NULL_RTX,
6314 VOIDmode, 0),
6315 unsignedp),
6316 LE, NULL_RTX, mode, unsignedp, 0,
6317 label_rtx (node->code_label));
6319 emit_case_nodes (index, node->right, default_label, index_type);
6322 else if (node->right == 0 && node->left != 0)
6324 /* Deal with values to the right of this node,
6325 if they are possible. */
6326 if (!node_has_high_bound (node, index_type))
6328 emit_cmp_and_jump_insns (index,
6329 convert_modes
6330 (mode, imode,
6331 expand_expr (node->high, NULL_RTX,
6332 VOIDmode, 0),
6333 unsignedp),
6334 GT, NULL_RTX, mode, unsignedp, 0,
6335 default_label);
6338 /* Value belongs to this node or to the left-hand subtree. */
6340 emit_cmp_and_jump_insns (index,
6341 convert_modes
6342 (mode, imode,
6343 expand_expr (node->low, NULL_RTX,
6344 VOIDmode, 0),
6345 unsignedp),
6346 GE, NULL_RTX, mode, unsignedp, 0,
6347 label_rtx (node->code_label));
6349 emit_case_nodes (index, node->left, default_label, index_type);
6352 else
6354 /* Node has no children so we check low and high bounds to remove
6355 redundant tests. At most one of the bounds can hold,
6356 since otherwise this node is bounded--a case tested already. */
6357 int high_bound = node_has_high_bound (node, index_type);
6358 int low_bound = node_has_low_bound (node, index_type);
6360 if (!high_bound && low_bound)
6362 emit_cmp_and_jump_insns (index,
6363 convert_modes
6364 (mode, imode,
6365 expand_expr (node->high, NULL_RTX,
6366 VOIDmode, 0),
6367 unsignedp),
6368 GT, NULL_RTX, mode, unsignedp, 0,
6369 default_label);
6372 else if (!low_bound && high_bound)
6374 emit_cmp_and_jump_insns (index,
6375 convert_modes
6376 (mode, imode,
6377 expand_expr (node->low, NULL_RTX,
6378 VOIDmode, 0),
6379 unsignedp),
6380 LT, NULL_RTX, mode, unsignedp, 0,
6381 default_label);
6383 else if (!low_bound && !high_bound)
6385 /* Widen LOW and HIGH to the same width as INDEX. */
6386 tree type = type_for_mode (mode, unsignedp);
6387 tree low = build1 (CONVERT_EXPR, type, node->low);
6388 tree high = build1 (CONVERT_EXPR, type, node->high);
6389 rtx low_rtx, new_index, new_bound;
6391 /* Instead of doing two branches, emit one unsigned branch for
6392 (index-low) > (high-low). */
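/* E.g. (illustrative): a range 10 ... 20 becomes the single test
   (unsigned) (index - 10) > 10, which sends both index < 10 and
   index > 20 to the default label, since the subtraction wraps
   values below 10 around to large unsigned numbers.  */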
6393 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6394 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6395 NULL_RTX, unsignedp,
6396 OPTAB_WIDEN);
6397 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6398 high, low)),
6399 NULL_RTX, mode, 0);
6401 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6402 mode, 1, 0, default_label);
6405 emit_jump (label_rtx (node->code_label));