/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
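
/* An illustrative sketch (not from the original sources): for
   `if (cond) stmt;' a front end makes roughly these calls:

       expand_start_cond (cond, 0);   -- emit test of COND
       ...expand STMT...
       expand_end_cond ();            -- emit label for the join point

   An if-then-else additionally calls `expand_start_else' between
   the two clauses.  */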
#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
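
/* For example (illustrative only): a C `switch (x)' whose labels are
   1, 2, 3 and 4 is dense, so it would normally be output as a branch
   table indexed by X, whereas labels of 1, 100 and 10000 are sparse
   and would be output as a balanced tree of compare-and-jump insns.  */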
struct case_node
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;                 /* Lowest index value for this label */
  tree high;                /* Highest index value for this label */
  tree code_label;          /* Label to jump to when node matches */
  int balance;              /* AVL balance factor, used while the
                               labels are still held as a tree */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT)((I) + 1)]
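
/* For instance, COST_TABLE (-1) refers to cost_table_[0] and
   COST_TABLE (127) to cost_table_[128]; hence the 129 entries above.  */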
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
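
/* Illustrative example (not from the original sources): while expanding

       while (c) { int v; ... }

   two `struct nesting' objects are live: one for the loop, on
   `loop_stack', and one for the brace-block, on `block_stack'.
   Both are also on `nesting_stack', chained innermost-first
   through their `all' fields.  */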
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } cond;
      /* For loops.  */
      struct
        {
          /* Label at the top of the loop; place to loop back to.  */
          rtx start_label;
          /* Label at the end of the whole construct.  */
          rtx end_label;
          /* Label before a jump that branches to the end of the whole
             construct.  This is where destructors go if any.  */
          rtx alt_end_label;
          /* Label for `continue' statement to jump to;
             this is in front of the stepper of the loop.  */
          rtx continue_label;
        } loop;
      /* For variable binding contours.  */
      struct
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0,
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Number of function calls seen, as of start of this block.  */
          int n_function_calls;
          /* Nonzero if this is associated with an EH region.  */
          int exception_region;
          /* The saved target_temp_slot_level from our outer block.
             We may reset target_temp_slot_level to be the level of
             this block; if that is done, target_temp_slot_level
             reverts to the saved target_temp_slot_level at the very
             end of the block.  */
          int block_target_temp_slot_level;
          /* True if we are currently emitting insns in an area of
             output code that is controlled by a conditional
             expression.  This is used by the cleanup handling code to
             generate conditional cleanup actions.  */
          int conditional_code;
          /* A place to move the start of the exception region for any
             of the conditional cleanups, must be at the end or after
             the start of the last unconditional cleanup, and before any
             conditional branch points.  */
          rtx last_unconditional_cleanup;
          /* When in a conditional context, this is the specific
             cleanup list associated with last_unconditional_cleanup,
             where we place the conditionalized cleanups.  */
          tree *cleanup_ptr;
        } block;
      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */
      struct
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* A list of case labels; it is first built as an AVL tree.
             During expand_end_case, this is converted to a list, and may be
             rearranged into a nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Name of this kind of statement, for warnings.  */
          const char *printname;
          /* Used to save no_line_numbers till we see the first case label.
             We set this to -1 when we see the first case label in this
             case statement.  */
          int line_number_status;
        } case_stmt;
    } data;
};

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)                                 \
do { struct nesting *target = STACK;                    \
     struct nesting *this;                              \
     do { this = nesting_stack;                         \
          if (loop_stack == this)                       \
            loop_stack = loop_stack->next;              \
          if (cond_stack == this)                       \
            cond_stack = cond_stack->next;              \
          if (block_stack == this)                      \
            block_stack = block_stack->next;            \
          if (stack_block_stack == this)                \
            stack_block_stack = stack_block_stack->next; \
          if (case_stack == this)                       \
            case_stack = case_stack->next;              \
          nesting_depth = nesting_stack->depth - 1;     \
          nesting_stack = this->all;                    \
          obstack_free (&stmt_obstack, this); }         \
     while (this != target); } while (0)
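
/* The expand_end_WHATEVER functions in this file finish by popping
   their construct with this macro; e.g. (illustrative) ending a loop
   amounts to POPSTACK (loop_stack);.  */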
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
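
/* An illustrative sketch (GNU C source, not from this file):

       {
         char buf[n];   -- variable-size object: exiting this contour
                           must reset the stack pointer
         ...
         goto done;     -- forward goto: `done' is not yet defined, so
                           the needed stack-restore code is not known;
                           expand_goto records a fixup instead
       }
     done: ;

   When the binding contour is exited, the fixup is checked; `done' is
   defined by then, so the proper stack-restore insns can be inserted
   just before the jump.  */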
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to the following label in the chain.  */
  struct label_chain *next;
  tree label;
};
struct stmt_status
{
  /* Chain of all pending binding contours.  */
  struct nesting *x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting *x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting *x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting *x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting *x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting *x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Filename and line number of last line-number note,
     whether we actually emitted it or not.  */
  const char *x_emit_filename;
  int x_emit_lineno;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_filename (cfun->stmt->x_emit_filename)
#define emit_lineno (cfun->stmt->x_emit_lineno)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;
static int n_occurrences                PARAMS ((int, const char *));
static void expand_goto_internal        PARAMS ((tree, rtx, rtx));
static int expand_fixup                 PARAMS ((tree, rtx, rtx));
static rtx expand_nl_handler_label      PARAMS ((rtx, rtx));
static void expand_nl_goto_receiver     PARAMS ((void));
static void expand_nl_goto_receivers    PARAMS ((struct nesting *));
static void fixup_gotos                 PARAMS ((struct nesting *, rtx, tree,
                                                 rtx, int));
static void expand_null_return_1        PARAMS ((rtx, int));
static void expand_value_return         PARAMS ((rtx));
static int tail_recursion_args          PARAMS ((tree, tree));
static void expand_cleanups             PARAMS ((tree, tree, int, int));
static void check_seenlabel             PARAMS ((void));
static void do_jump_if_equal            PARAMS ((rtx, rtx, rtx, int));
static int estimate_case_costs          PARAMS ((case_node_ptr));
static void group_case_nodes            PARAMS ((case_node_ptr));
static void balance_case_nodes          PARAMS ((case_node_ptr *,
                                                 case_node_ptr));
static int node_has_low_bound           PARAMS ((case_node_ptr, tree));
static int node_has_high_bound          PARAMS ((case_node_ptr, tree));
static int node_is_bounded              PARAMS ((case_node_ptr, tree));
static void emit_jump_if_reachable      PARAMS ((rtx));
static void emit_case_nodes             PARAMS ((rtx, case_node_ptr, rtx, tree));
static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
static void mark_cond_nesting           PARAMS ((struct nesting *));
static void mark_loop_nesting           PARAMS ((struct nesting *));
static void mark_block_nesting          PARAMS ((struct nesting *));
static void mark_case_nesting           PARAMS ((struct nesting *));
static void mark_case_node              PARAMS ((struct case_node *));
static void mark_goto_fixup             PARAMS ((struct goto_fixup *));
static void free_case_nodes             PARAMS ((case_node_ptr));
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

/* Mark N (known to be a cond-nesting) for GC.  */

static void
mark_cond_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.cond.endif_label);
      ggc_mark_rtx (n->data.cond.next_label);

      n = n->next;
    }
}

/* Mark N (known to be a loop-nesting) for GC.  */

static void
mark_loop_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.loop.start_label);
      ggc_mark_rtx (n->data.loop.end_label);
      ggc_mark_rtx (n->data.loop.alt_end_label);
      ggc_mark_rtx (n->data.loop.continue_label);

      n = n->next;
    }
}
/* Mark N (known to be a block-nesting) for GC.  */

static void
mark_block_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      struct label_chain *l;

      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.block.stack_level);
      ggc_mark_rtx (n->data.block.first_insn);
      ggc_mark_tree (n->data.block.cleanups);
      ggc_mark_tree (n->data.block.outer_cleanups);

      for (l = n->data.block.label_chain; l != NULL; l = l->next)
        {
          ggc_mark (l);
          ggc_mark_tree (l->label);
        }

      ggc_mark_rtx (n->data.block.last_unconditional_cleanup);

      /* ??? cleanup_ptr never points outside the stack, does it?  */

      n = n->next;
    }
}

/* Mark N (known to be a case-nesting) for GC.  */

static void
mark_case_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.case_stmt.start);

      ggc_mark_tree (n->data.case_stmt.default_label);
      ggc_mark_tree (n->data.case_stmt.index_expr);
      ggc_mark_tree (n->data.case_stmt.nominal_type);

      mark_case_node (n->data.case_stmt.case_list);
      n = n->next;
    }
}

/* Mark C for GC.  */

static void
mark_case_node (c)
     struct case_node *c;
{
  if (c != 0)
    {
      ggc_mark_tree (c->low);
      ggc_mark_tree (c->high);
      ggc_mark_tree (c->code_label);

      mark_case_node (c->right);
      mark_case_node (c->left);
    }
}

/* Mark G for GC.  */

static void
mark_goto_fixup (g)
     struct goto_fixup *g;
{
  while (g)
    {
      ggc_mark (g);
      ggc_mark_rtx (g->before_jump);
      ggc_mark_tree (g->target);
      ggc_mark_tree (g->context);
      ggc_mark_rtx (g->target_rtl);
      ggc_mark_rtx (g->stack_level);
      ggc_mark_tree (g->cleanup_list_list);

      g = g->next;
    }
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_stmt_status (f)
     struct function *f;
{
  /* We're about to free the function obstack.  If we hold pointers to
     things allocated there, then we'll try to mark them when we do
     GC.  So, we clear them out here explicitly.  */
  if (f->stmt)
    free (f->stmt);
  f->stmt = NULL;
}
/* Mark P for GC.  */

void
mark_stmt_status (p)
     struct stmt_status *p;
{
  if (p == 0)
    return;

  mark_block_nesting (p->x_block_stack);
  mark_cond_nesting (p->x_cond_stack);
  mark_loop_nesting (p->x_loop_stack);
  mark_case_nesting (p->x_case_stack);

  ggc_mark_tree (p->x_last_expr_type);
  /* last_expr_value is only valid if last_expr_type is nonzero.  */
  if (p->x_last_expr_type)
    ggc_mark_rtx (p->x_last_expr_value);

  mark_goto_fixup (p->x_goto_fixup_chain);
}
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
}

void
init_stmt_for_function ()
{
  cfun->stmt = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));

  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  current_block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
  last_expr_value = NULL_RTX;
}
/* Return nonzero if anything is pushed on the loop, condition, or case
   stack.  */
int
in_control_zone_p ()
{
  return cond_stack || loop_stack || case_stack;
}
/* Record the current file and line.  Called from emit_line_note.  */
void
set_file_and_line_for_stmt (file, line)
     const char *file;
     int line;
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    {
      emit_filename = file;
      emit_lineno = line;
    }
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
          || (GET_CODE (last_insn) == NOTE
              && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (current_function_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, LCT_CONST_MAKE_BLOCK,
                       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
                       &nonlocal_goto_stack_level,
                       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area, insn;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
           link = TREE_CHAIN (link))
        handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
         case the virtual stack vars register gets instantiated differently
         in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
         which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
                                               virtual_stack_vars_rtx,
                                               static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
        save_area = replace_rtx (copy_rtx (save_area),
                                 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
                                      save_area, label_ref));
      else
#endif
        {
          /* Restore frame pointer for containing function.
             This sets the actual hard register used for the frame pointer
             to the location of the function's incoming static chain info.
             The non-local goto handler will then adjust it to contain the
             proper value and reload the argument pointer, if needed.  */
          emit_move_insn (hard_frame_pointer_rtx, static_chain);
          emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

          /* USE of hard_frame_pointer_rtx added for consistency;
             not clear if really needed.  */
          emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
          emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
          emit_indirect_jump (handler_slot);
        }

      /* Search backwards to the jump insn and mark it as a
         non-local goto.  */
      for (insn = get_last_insn ();
           GET_CODE (insn) != JUMP_INSN;
           insn = PREV_INSN (insn))
        continue;
      REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
                                          REG_NOTES (insn));
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing insn-uids.)
         Then restore the outermost stack level within that block,
         and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
        {
          if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
            break;
          if (block->data.block.stack_level != 0)
            stack_level = block->data.block.stack_level;
          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
            }
        }

      if (stack_level)
        {
          /* Ensure stack adjust isn't done by emit_jump, as this
             would clobber the stack pointer.  This one should be
             deleted as dead by flow.  */
          clear_pending_stack_adjust ();
          do_pending_stack_adjust ();

          /* Don't do this adjust if it's to the end label and this function
             is to return with a depressed stack pointer.  */
          if (label == return_label
              && (((TREE_CODE (TREE_TYPE (current_function_decl))
                    == FUNCTION_TYPE)
                   && (TYPE_RETURNS_STACK_DEPRESSED
                       (TREE_TYPE (current_function_decl))))))
            ;
          else
            emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
        }

      if (body != 0 && DECL_TOO_LATE (body))
        error ("jump to `%s' invalidly jumps into binding contour",
               IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
         of at least one goto that has no fixup.  */
      if (body != 0)
        TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
          || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
           && (rtl_label == loop_stack->data.loop.start_label
               || rtl_label == loop_stack->data.loop.end_label
               || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
         If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
        next_block = next_block->all;

      if (next_block)
        return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
         which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
        if (block == next_block)
          next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
        || block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
        = (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
         after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
         doing the stack adjust here isn't useful.  Doing it with our
         settings at that location isn't useful either.  Let's hope
         someone does it!  */
      if (last_insn == 0)
        do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
         NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
         this point.  The notes will encapsulate any and all fixup
         code which we might later insert at this point in the insn
         stream.  Also, the BLOCK node will be the parent (i.e. the
         `SUPERBLOCK') of any other BLOCK nodes which we might create
         later on when we are expanding the fixup code.

         Note that optimization passes (including expand_end_loop)
         might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
         as a placeholder.  */

      {
        register rtx original_before_jump
          = last_insn ? last_insn : get_last_insn ();
        rtx start;
        rtx end;
        tree block;

        block = make_node (BLOCK);
        TREE_USED (block) = 1;

        if (!cfun->x_whole_function_mode_p)
          insert_block (block);
        else
          {
            BLOCK_CHAIN (block)
              = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
            BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
              = block;
          }

        start_sequence ();
        start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
        if (cfun->x_whole_function_mode_p)
          NOTE_BLOCK (start) = block;
        fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
        end = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
        if (cfun->x_whole_function_mode_p)
          NOTE_BLOCK (end) = block;
        fixup->context = block;
        end_sequence ();
        emit_insns_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
        = ((block->data.block.outer_cleanups
            || block->data.block.cleanups)
           ? tree_cons (NULL_TREE, block->data.block.cleanups,
                        block->data.block.outer_cleanups)
           : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev != 0)
            prev->next = f->next;
        }
      /* Has this fixup's target label been defined?
         If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
        {
          register rtx cleanup_insns;

          /* If this fixup jumped into this contour from before the beginning
             of this contour, report an error.   This code used to use
             the first non-label insn after f->target_rtl, but that's
             wrong since such can be added, by things like put_var_into_stack
             and have INSN_UIDs that are out of the range of the block.  */
          /* ??? Bug: this does not detect jumping in through intermediate
             blocks that have stack levels or cleanups.
             It detects only a problem with the innermost block
             around the label.  */
          if (f->target != 0
              && (dont_jump_in || stack_level || cleanup_list)
              && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
              && INSN_UID (first_insn) > INSN_UID (f->before_jump)
              && ! DECL_ERROR_ISSUED (f->target))
            {
              error_with_decl (f->target,
                               "label `%s' used before containing binding contour");
              /* Prevent multiple errors for one label.  */
              DECL_ERROR_ISSUED (f->target) = 1;
            }

          /* We will expand the cleanups into a sequence of their own and
             then later on we will attach this new sequence to the insn
             stream just ahead of the actual jump insn.  */

          start_sequence ();

          /* Temporarily restore the lexical context where we will
             logically be inserting the fixup code.  We do this for the
             sake of getting the debugging information right.  */

          pushlevel (0);
          set_block (f->context);

          /* Expand the cleanups for blocks this jump exits.  */
          if (f->cleanup_list_list)
            {
              tree lists;
              for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
                /* Marked elements correspond to blocks that have been closed.
                   Do their cleanups.  */
                if (TREE_ADDRESSABLE (lists)
                    && TREE_VALUE (lists) != 0)
                  {
                    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
                    /* Pop any pushes done in the cleanups,
                       in case function is about to return.  */
                    do_pending_stack_adjust ();
                  }
            }

          /* Restore stack level for the biggest contour that this
             jump jumps out of.  */
          if (f->stack_level
              && ! (f->target_rtl == return_label
                    && ((TREE_CODE (TREE_TYPE (current_function_decl))
                         == FUNCTION_TYPE)
                        && (TYPE_RETURNS_STACK_DEPRESSED
                            (TREE_TYPE (current_function_decl))))))
            emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

          /* Finish up the sequence containing the insns which implement the
             necessary cleanups, and then attach that whole sequence to the
             insn stream just ahead of the actual jump insn.  Attaching it
             at that point ensures that any cleanups which are in fact
             implicit C++ object destructions (which must be executed upon
             leaving the block) appear (to the debugger) to be taking place
             in an area of the generated code where the object(s) being
             destructed are still "in scope".  */

          cleanup_insns = get_insns ();
          poplevel (1, 0, 0);

          end_sequence ();
          emit_insns_after (cleanup_insns, f->before_jump);

          f->before_jump = 0;
        }
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
        && PREV_INSN (f->target_rtl) == 0
        /* Label has still not appeared.  If we are exiting a block with
           a stack level to restore, that started before the fixup,
           mark this stack level as needing restoration
           when the fixup is later finalized.  */
        && thisblock != 0
        /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
           means the label is undefined.  That's erroneous, but possible.  */
        && (thisblock->data.block.block_start_count
            <= f->block_start_count))
      {
        tree lists = f->cleanup_list_list;
        rtx cleanup_insns;

        for (; lists; lists = TREE_CHAIN (lists))
          /* If the following elt. corresponds to our containing block
             then the elt. must be for this block.  */
          if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
            {
              start_sequence ();
              pushlevel (0);
              set_block (f->context);
              expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
              cleanup_insns = get_insns ();
              poplevel (1, 0, 0);
              end_sequence ();
              if (cleanup_insns != 0)
                f->before_jump
                  = emit_insns_after (cleanup_insns, f->before_jump);

              f->cleanup_list_list = TREE_CHAIN (lists);
            }

        if (stack_level)
          f->stack_level = stack_level;
      }
}
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used in function where memory usage is checked");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
                                TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
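
/* For illustration (a hypothetical GNU C source, not from this file):

       asm volatile ("foo %1,%0" : "=r" (out) : "g" (in) : "cc");

   arrives here with STRING the template "foo %1,%0", OUTPUTS a
   one-element list whose TREE_PURPOSE is the constraint "=r" and
   whose TREE_VALUE is `out', INPUTS likewise for "g" and `in',
   CLOBBERS a one-element list naming "cc", and VOL nonzero because
   of `volatile'.  */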
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     const char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;
  int old_generating_concat_p = generating_concat_p;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

#ifdef MD_ASM_CLOBBERS
  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  MD_ASM_CLOBBERS (clobbers);
#endif

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used in function where memory usage is checked");
      return;
    }
  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
        ++nclobbers;
      else if (i == -2)
        error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  /* Check that the number of alternatives is constant across all
     operands.  */
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
        {
          error ("too many alternatives in `asm'");
          return;
        }

      tmp = outputs;
      while (tmp)
        {
          const char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));

          if (n_occurrences (',', constraint) != nalternatives)
            {
              error ("operand constraints for `asm' differ in number of alternatives");
              return;
            }

          if (TREE_CHAIN (tmp))
            tmp = TREE_CHAIN (tmp);
          else
            tmp = next, next = 0;
        }
    }
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      const char *constraint;
      char *p;
      int c_len;
      int j;
      int is_inout = 0;
      int allows_reg = 0;
      int allows_mem = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
        return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
         if it allows any register.  Be liberal on the latter test, since
         the worst that happens if we get it wrong is we issue an error
         message.  */

      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
      c_len = strlen (constraint);

      /* Allow the `=' or `+' to not be at the beginning of the string,
         since it wasn't explicitly documented that way, and there is a
         large body of code that puts it last.  Swap the character to
         the front, so as not to uglify any place else.  */
      switch (c_len)
        {
        default:
          if ((p = strchr (constraint, '=')) != NULL)
            break;
          if ((p = strchr (constraint, '+')) != NULL)
            break;
          /* Fall through.  */
        case 0:
          error ("output operand constraint lacks `='");
          return;
        }
      j = p - constraint;
      is_inout = *p == '+';

      if (j || is_inout)
        {
          /* Have to throw away this constraint string and get a new one.  */
          char *buf = alloca (c_len + 1);
          buf[0] = '=';
          if (j)
            memcpy (buf + 1, constraint, j);
          memcpy (buf + 1 + j, p + 1, c_len - j);  /* not -j-1 - copy null */
          constraint = ggc_alloc_string (buf, c_len);

          if (j)
            warning (
                "output constraint `%c' for operand %d is not at the beginning",
                *p, i);
        }

      /* Make sure we can specify the matching operand.  */
      if (is_inout && i > 9)
        {
          error ("output operand constraint %d contains `+'", i);
          return;
        }

      for (j = 1; j < c_len; j++)
        switch (constraint[j])
          {
          case '+':
          case '=':
            error ("operand constraint contains '+' or '=' at illegal position.");
            return;

          case '%':
            if (i + 1 == ninputs + noutputs)
              {
                error ("`%%' constraint used with last operand");
                return;
              }
            break;

          case '?':  case '!':  case '*':  case '&':  case '#':
          case 'E':  case 'F':  case 'G':  case 'H':
          case 's':  case 'i':  case 'n':
          case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
          case 'N':  case 'O':  case 'P':  case ',':
            break;

          case '0':  case '1':  case '2':  case '3':  case '4':
          case '5':  case '6':  case '7':  case '8':  case '9':
            error ("matching constraint not valid in output operand");
            break;

          case 'V':  case 'm':  case 'o':
            allows_mem = 1;
            break;

          case '<':  case '>':
            /* ??? Before flow, auto inc/dec insns are not supposed to exist,
               excepting those that expand_call created.  So match memory
               and hope.  */
            allows_mem = 1;
            break;

          case 'g':  case 'X':
            allows_reg = 1;
            allows_mem = 1;
            break;

          case 'p': case 'r':
            allows_reg = 1;
            break;

          default:
            if (! ISALPHA (constraint[j]))
              {
                error ("invalid punctuation `%c' in constraint",
                       constraint[j]);
                return;
              }
            if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
              allows_reg = 1;
#ifdef EXTRA_CONSTRAINT
            else
              {
                /* Otherwise we can't assume anything about the nature of
                   the constraint except that it isn't purely registers.
                   Treat it like "g" and hope for the best.  */
                allows_reg = 1;
                allows_mem = 1;
              }
#endif
            break;
          }

      /* If an output operand is not a decl or indirect ref and our constraint
         allows a register, make a temporary to act as an intermediate.
         Make the asm insn write into that, then our caller will copy it to
         the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
           && allows_mem)
          || (DECL_P (val)
              && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
              && ! (GET_CODE (DECL_RTL (val)) == REG
                    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
          || ! allows_reg
          || is_inout)
        {
          if (! allows_reg)
            mark_addressable (TREE_VALUE (tail));

          output_rtx[i]
            = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
                           EXPAND_MEMORY_USE_WO);

          if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
            error ("output number %d not directly addressable", i);
          if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
              || GET_CODE (output_rtx[i]) == CONCAT)
            {
              real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
              output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
              if (is_inout)
                emit_move_insn (output_rtx[i], real_output_rtx[i]);
            }
        }
      else
        {
          output_rtx[i] = assign_temp (type, 0, 0, 1);
          TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
        }

      generating_concat_p = old_generating_concat_p;

      if (is_inout)
        {
          inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
          inout_opnum[ninout++] = i;
        }
    }
  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
                                : GET_MODE (output_rtx[0])),
                               TREE_STRING_POINTER (string),
                               empty_string, 0, argvec, constraints,
                               filename, line);

  MEM_VOLATILE_P (body) = vol;
  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0, allows_mem = 0;
      const char *constraint, *orig_constraint;
      int c_len;
      rtx op;

      /* If there's an erroneous arg, emit no insn,
         because the ASM_INPUT would get VOIDmode
         and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
        return;

      /* ??? Can this happen, and does the error message make any sense?  */
      if (TREE_PURPOSE (tail) == NULL_TREE)
        {
          error ("hard register `%s' listed as input operand to `asm'",
                 TREE_STRING_POINTER (TREE_VALUE (tail)) );
          return;
        }

      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
      c_len = strlen (constraint);
      orig_constraint = constraint;

      /* Make sure constraint has neither `=', `+', nor '&'.  */

      for (j = 0; j < c_len; j++)
        switch (constraint[j])
          {
          case '+':  case '=':  case '&':
            if (constraint == orig_constraint)
              {
                error ("input operand constraint contains `%c'",
                       constraint[j]);
                return;
              }
            break;

          case '%':
            if (constraint == orig_constraint
                && i + 1 == ninputs - ninout)
              {
                error ("`%%' constraint used with last operand");
                return;
              }
            break;

          case 'V':  case 'm':  case 'o':
            allows_mem = 1;
            break;

          case '<':  case '>':
          case '?':  case '!':  case '*':  case '#':
          case 'E':  case 'F':  case 'G':  case 'H':
          case 's':  case 'i':  case 'n':
          case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
          case 'N':  case 'O':  case 'P':  case ',':
            break;

            /* Whether or not a numeric constraint allows a register is
               decided by the matching constraint, and so there is no need
               to do anything special with them.  We must handle them in
               the default case, so that we don't unnecessarily force
               operands to memory.  */
          case '0':  case '1':  case '2':  case '3':  case '4':
          case '5':  case '6':  case '7':  case '8':  case '9':
            if (constraint[j] >= '0' + noutputs)
              {
                error
                  ("matching constraint references invalid operand number");
                return;
              }

            /* Try and find the real constraint for this dup.  */
            if ((j == 0 && c_len == 1)
                || (j == 1 && c_len == 2 && constraint[0] == '%'))
              {
                tree o = outputs;

                for (j = constraint[j] - '0'; j > 0; --j)
                  o = TREE_CHAIN (o);

                constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
                c_len = strlen (constraint);
                j = 0;
                break;
              }

            /* Fall through.  */

          case 'p':  case 'r':
            allows_reg = 1;
            break;

          case 'g':  case 'X':
            allows_reg = 1;
            allows_mem = 1;
            break;

          default:
            if (! ISALPHA (constraint[j]))
              {
                error ("invalid punctuation `%c' in constraint",
                       constraint[j]);
                return;
              }
            if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
              allows_reg = 1;
#ifdef EXTRA_CONSTRAINT
            else
              {
                /* Otherwise we can't assume anything about the nature of
                   the constraint except that it isn't purely registers.
                   Treat it like "g" and hope for the best.  */
                allows_reg = 1;
                allows_mem = 1;
              }
#endif
            break;
          }

      if (! allows_reg && allows_mem)
        mark_addressable (TREE_VALUE (tail));

      op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

      /* Never pass a CONCAT to an ASM.  */
      generating_concat_p = 0;
      if (GET_CODE (op) == CONCAT)
        op = force_reg (GET_MODE (op), op);

      if (asm_operand_ok (op, constraint) <= 0)
        {
          if (allows_reg)
            op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
          else if (!allows_mem)
            warning ("asm operand %d probably doesn't match constraints", i);
          else if (CONSTANT_P (op))
            op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                                  op);
          else if (GET_CODE (op) == REG
                   || GET_CODE (op) == SUBREG
                   || GET_CODE (op) == CONCAT)
            {
              tree type = TREE_TYPE (TREE_VALUE (tail));
              tree qual_type = build_qualified_type (type,
                                                     (TYPE_QUALS (type)
                                                      | TYPE_QUAL_CONST));
              rtx memloc = assign_temp (qual_type, 1, 1, 1);

              emit_move_insn (memloc, op);
              op = memloc;
            }

          else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
            /* We won't recognize volatile memory as an available
               memory_operand at this point.  Ignore it.  */
            ;
          else if (queued_subexp_p (op))
            ;
          else
            /* ??? Leave this only until we have experience with what
               happens in combine and elsewhere when constraints are
               not satisfied.  */
            warning ("asm operand %d probably doesn't match constraints", i);
        }
      generating_concat_p = old_generating_concat_p;
      ASM_OPERANDS_INPUT (body, i) = op;

      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
        = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                             orig_constraint);
      i++;
    }
1794 /* Protect all the operands from the queue now that they have all been
1795 evaluated. */
1797 generating_concat_p = 0;
1799 for (i = 0; i < ninputs - ninout; i++)
1800 ASM_OPERANDS_INPUT (body, i)
1801 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1803 for (i = 0; i < noutputs; i++)
1804 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
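/* For example, an in-out operand written as `asm ("..." : "+r" (x))'
   has by this point been split into output operand 0 plus a hidden
   matching input whose constraint is just the digit string "0"; the
   loop below materializes that hidden input from the output's rtx.  */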
1806 /* For in-out operands, copy output rtx to input rtx. */
1807 for (i = 0; i < ninout; i++)
1809 int j = inout_opnum[i];
1811 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1812 = output_rtx[j];
1813 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1814 = gen_rtx_ASM_INPUT (inout_mode[i], digit_string (j));
1817 generating_concat_p = old_generating_concat_p;
1819 /* Now, for each output, construct an rtx
1820 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1821 ARGVEC CONSTRAINTS))
1822 If there is more than one, put them inside a PARALLEL. */
1824 if (noutputs == 1 && nclobbers == 0)
1826 ASM_OPERANDS_OUTPUT_CONSTRAINT (body)
1827 = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1828 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1831 else if (noutputs == 0 && nclobbers == 0)
1833 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1834 insn = emit_insn (body);
1837 else
1839 rtx obody = body;
1840 int num = noutputs;
1842 if (num == 0)
1843 num = 1;
1845 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1847 /* For each output operand, store a SET. */
1848 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1850 XVECEXP (body, 0, i)
1851 = gen_rtx_SET (VOIDmode,
1852 output_rtx[i],
1853 gen_rtx_ASM_OPERANDS
1854 (GET_MODE (output_rtx[i]),
1855 TREE_STRING_POINTER (string),
1856 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1857 i, argvec, constraints,
1858 filename, line));
1860 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1863 /* If there are no outputs (but there are some clobbers)
1864 store the bare ASM_OPERANDS into the PARALLEL. */
1866 if (i == 0)
1867 XVECEXP (body, 0, i++) = obody;
1869 /* Store (clobber REG) for each clobbered register specified. */
1871 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1873 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1874 int j = decode_reg_name (regname);
1876 if (j < 0)
1878 if (j == -3) /* `cc', which is not a register */
1879 continue;
1881 if (j == -4) /* `memory', don't cache memory across asm */
1883 XVECEXP (body, 0, i++)
1884 = gen_rtx_CLOBBER (VOIDmode,
1885 gen_rtx_MEM
1886 (BLKmode,
1887 gen_rtx_SCRATCH (VOIDmode)));
1888 continue;
1891 /* Ignore unknown register, error already signaled. */
1892 continue;
1895 /* Use QImode since that's guaranteed to clobber just one reg. */
1896 XVECEXP (body, 0, i++)
1897 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1900 insn = emit_insn (body);
1903 /* For any outputs that needed reloading into registers, spill them
1904 back to where they belong. */
1905 for (i = 0; i < noutputs; ++i)
1906 if (real_output_rtx[i])
1907 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1909 free_temp_slots ();
1912 /* Generate RTL to evaluate the expression EXP
1913 and remember it in case this is the VALUE in a ({... VALUE; }) construct. */
1915 void
1916 expand_expr_stmt (exp)
1917 tree exp;
1919 /* If -W, warn about statements with no side effects,
1920 except for an explicit cast to void (e.g. for assert()), and
1921 except inside a ({...}) where they may be useful. */
1922 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1924 if (! TREE_SIDE_EFFECTS (exp))
1926 if ((extra_warnings || warn_unused_value)
1927 && !(TREE_CODE (exp) == CONVERT_EXPR
1928 && VOID_TYPE_P (TREE_TYPE (exp))))
1929 warning_with_file_and_line (emit_filename, emit_lineno,
1930 "statement with no effect");
1932 else if (warn_unused_value)
1933 warn_if_unused_value (exp);
1936 /* If EXP is of function type and we are expanding statements for
1937 value, convert it to pointer-to-function. */
1938 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1939 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1941 /* The call to `expand_expr' could cause last_expr_type and
1942 last_expr_value to get reset. Therefore, we set last_expr_value
1943 and last_expr_type *after* calling expand_expr. */
1944 last_expr_value = expand_expr (exp,
1945 (expr_stmts_for_value
1946 ? NULL_RTX : const0_rtx),
1947 VOIDmode, 0);
1948 last_expr_type = TREE_TYPE (exp);
1950 /* If all we do is reference a volatile value in memory,
1951 copy it to a register to be sure it is actually touched. */
1952 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1953 && TREE_THIS_VOLATILE (exp))
1955 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1956 ;
1957 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1958 copy_to_reg (last_expr_value);
1959 else
1961 rtx lab = gen_label_rtx ();
1963 /* Compare the value with itself to reference it. */
1964 emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
1965 expand_expr (TYPE_SIZE (last_expr_type),
1966 NULL_RTX, VOIDmode, 0),
1967 BLKmode, 0,
1968 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
1969 lab);
1970 emit_label (lab);
1974 /* If this expression is part of a ({...}) and is in memory, we may have
1975 to preserve temporaries. */
1976 preserve_temp_slots (last_expr_value);
1978 /* Free any temporaries used to evaluate this expression. Any temporary
1979 used as a result of this expression will already have been preserved
1980 above. */
1981 free_temp_slots ();
1983 emit_queue ();
1986 /* Warn if EXP contains any computations whose results are not used.
1987 Return 1 if a warning is printed; 0 otherwise. */
1990 warn_if_unused_value (exp)
1991 tree exp;
1993 if (TREE_USED (exp))
1994 return 0;
1996 /* Don't warn about void constructs. This includes casting to void,
1997 void function calls, and statement expressions with a final cast
1998 to void. */
1999 if (VOID_TYPE_P (TREE_TYPE (exp)))
2000 return 0;
2002 /* If this is an expression with side effects, don't warn. */
2003 if (TREE_SIDE_EFFECTS (exp))
2004 return 0;
2006 switch (TREE_CODE (exp))
2008 case PREINCREMENT_EXPR:
2009 case POSTINCREMENT_EXPR:
2010 case PREDECREMENT_EXPR:
2011 case POSTDECREMENT_EXPR:
2012 case MODIFY_EXPR:
2013 case INIT_EXPR:
2014 case TARGET_EXPR:
2015 case CALL_EXPR:
2016 case METHOD_CALL_EXPR:
2017 case RTL_EXPR:
2018 case TRY_CATCH_EXPR:
2019 case WITH_CLEANUP_EXPR:
2020 case EXIT_EXPR:
2021 return 0;
2023 case BIND_EXPR:
2024 /* For a binding, warn if no side effect within it. */
2025 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2027 case SAVE_EXPR:
2028 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2030 case TRUTH_ORIF_EXPR:
2031 case TRUTH_ANDIF_EXPR:
2032 /* In && or ||, warn if 2nd operand has no side effect. */
2033 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2035 case COMPOUND_EXPR:
2036 if (TREE_NO_UNUSED_WARNING (exp))
2037 return 0;
2038 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2039 return 1;
2040 /* Let people do `(foo (), 0)' without a warning. */
2041 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2042 return 0;
2043 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2045 case NOP_EXPR:
2046 case CONVERT_EXPR:
2047 case NON_LVALUE_EXPR:
2048 /* Don't warn about conversions not explicit in the user's program. */
2049 if (TREE_NO_UNUSED_WARNING (exp))
2050 return 0;
2051 /* Assignment to a cast usually results in a cast of a modify.
2052 Don't complain about that. There can be an arbitrary number of
2053 casts before the modify, so we must loop until we find the first
2054 non-cast expression and then test to see if that is a modify. */
2056 tree tem = TREE_OPERAND (exp, 0);
2058 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2059 tem = TREE_OPERAND (tem, 0);
2061 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2062 || TREE_CODE (tem) == CALL_EXPR)
2063 return 0;
2065 goto warn;
2067 case INDIRECT_REF:
2068 /* Don't warn about automatic dereferencing of references, since
2069 the user cannot control it. */
2070 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2071 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2072 /* Fall through. */
2074 default:
2075 /* Referencing a volatile value is a side effect, so don't warn. */
2076 if ((DECL_P (exp)
2077 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2078 && TREE_THIS_VOLATILE (exp))
2079 return 0;
2081 /* If this is an expression which has no operands, there is no value
2082 to be unused. There are no such language-independent codes,
2083 but front ends may define such. */
2084 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2085 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2086 return 0;
2088 warn:
2089 warning_with_file_and_line (emit_filename, emit_lineno,
2090 "value computed is not used");
2091 return 1;
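/* For example, under the rules above `x + 1;' draws this warning,
   while `(void) (x + 1);', `x++;', and `(foo (), 0)' do not.  */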
2095 /* Clear out the memory of the last expression evaluated. */
2097 void
2098 clear_last_expr ()
2100 last_expr_type = 0;
2103 /* Begin a statement which will return a value.
2104 Return the RTL_EXPR for this statement expr.
2105 The caller must save that value and pass it to expand_end_stmt_expr. */
2107 tree
2108 expand_start_stmt_expr ()
2110 tree t;
2112 /* Make the RTL_EXPR node temporary, not momentary,
2113 so that rtl_expr_chain doesn't become garbage. */
2114 t = make_node (RTL_EXPR);
2115 do_pending_stack_adjust ();
2116 start_sequence_for_rtl_expr (t);
2117 NO_DEFER_POP;
2118 expr_stmts_for_value++;
2119 return t;
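/* For illustration, a front end expanding a ({ ... }) statement
   expression pairs these calls roughly as follows (a sketch):

	tree t = expand_start_stmt_expr ();
	... expand the enclosed statements with expand_expr_stmt ...
	t = expand_end_stmt_expr (t);

   The tree returned above must be saved unchanged by the caller and
   handed to expand_end_stmt_expr below.  */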
2122 /* Restore the previous state at the end of a statement that returns a value.
2123 Returns a tree node representing the statement's value and the
2124 insns to compute the value.
2126 The nodes of that expression have been freed by now, so we cannot use them.
2127 But we don't want to do that anyway; the expression has already been
2128 evaluated and now we just want to use the value. So generate an RTL_EXPR
2129 with the proper type and RTL value.
2131 If the last substatement was not an expression,
2132 return something with type `void'. */
2134 tree
2135 expand_end_stmt_expr (t)
2136 tree t;
2138 OK_DEFER_POP;
2140 if (last_expr_type == 0)
2142 last_expr_type = void_type_node;
2143 last_expr_value = const0_rtx;
2145 else if (last_expr_value == 0)
2146 /* There are some cases where this can happen, such as when the
2147 statement is of void type. */
2148 last_expr_value = const0_rtx;
2149 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2150 /* Remove any possible QUEUED. */
2151 last_expr_value = protect_from_queue (last_expr_value, 0);
2153 emit_queue ();
2155 TREE_TYPE (t) = last_expr_type;
2156 RTL_EXPR_RTL (t) = last_expr_value;
2157 RTL_EXPR_SEQUENCE (t) = get_insns ();
2159 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2161 end_sequence ();
2163 /* Don't consider deleting this expr or containing exprs at tree level. */
2164 TREE_SIDE_EFFECTS (t) = 1;
2165 /* Propagate volatility of the actual RTL expr. */
2166 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2168 last_expr_type = 0;
2169 expr_stmts_for_value--;
2171 return t;
2174 /* Generate RTL for the start of an if-then. COND is the expression
2175 whose truth should be tested.
2177 If EXITFLAG is nonzero, this conditional is visible to
2178 `exit_something'. */
2180 void
2181 expand_start_cond (cond, exitflag)
2182 tree cond;
2183 int exitflag;
2185 struct nesting *thiscond = ALLOC_NESTING ();
2187 /* Make an entry on cond_stack for the cond we are entering. */
2189 thiscond->next = cond_stack;
2190 thiscond->all = nesting_stack;
2191 thiscond->depth = ++nesting_depth;
2192 thiscond->data.cond.next_label = gen_label_rtx ();
2193 /* Before we encounter an `else', we don't need a separate exit label
2194 unless there are supposed to be exit statements
2195 to exit this conditional. */
2196 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2197 thiscond->data.cond.endif_label = thiscond->exit_label;
2198 cond_stack = thiscond;
2199 nesting_stack = thiscond;
2201 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2204 /* Generate RTL between the then-clause and the elseif-clause
2205 of an if-then-elseif-.... */
2207 void
2208 expand_start_elseif (cond)
2209 tree cond;
2211 if (cond_stack->data.cond.endif_label == 0)
2212 cond_stack->data.cond.endif_label = gen_label_rtx ();
2213 emit_jump (cond_stack->data.cond.endif_label);
2214 emit_label (cond_stack->data.cond.next_label);
2215 cond_stack->data.cond.next_label = gen_label_rtx ();
2216 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2219 /* Generate RTL between the then-clause and the else-clause
2220 of an if-then-else. */
2222 void
2223 expand_start_else ()
2225 if (cond_stack->data.cond.endif_label == 0)
2226 cond_stack->data.cond.endif_label = gen_label_rtx ();
2228 emit_jump (cond_stack->data.cond.endif_label);
2229 emit_label (cond_stack->data.cond.next_label);
2230 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2233 /* After calling expand_start_else, turn this "else" into an "else if"
2234 by providing another condition. */
2236 void
2237 expand_elseif (cond)
2238 tree cond;
2240 cond_stack->data.cond.next_label = gen_label_rtx ();
2241 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2244 /* Generate RTL for the end of an if-then.
2245 Pop the record for it off of cond_stack. */
2247 void
2248 expand_end_cond ()
2250 struct nesting *thiscond = cond_stack;
2252 do_pending_stack_adjust ();
2253 if (thiscond->data.cond.next_label)
2254 emit_label (thiscond->data.cond.next_label);
2255 if (thiscond->data.cond.endif_label)
2256 emit_label (thiscond->data.cond.endif_label);
2258 POPSTACK (cond_stack);
2259 last_expr_type = 0;
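/* For illustration, `if (a) s1; else if (b) s2; else s3;' is
   expanded with the routines above roughly as follows (a sketch):

	expand_start_cond (a, 0);
	... expand s1 ...
	expand_start_elseif (b);
	... expand s2 ...
	expand_start_else ();
	... expand s3 ...
	expand_end_cond ();
 */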
2262 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2263 loop should be exited by `exit_something'. This is a loop for which
2264 `expand_continue' will jump to the top of the loop.
2266 Make an entry on loop_stack to record the labels associated with
2267 this loop. */
2269 struct nesting *
2270 expand_start_loop (exit_flag)
2271 int exit_flag;
2273 register struct nesting *thisloop = ALLOC_NESTING ();
2275 /* Make an entry on loop_stack for the loop we are entering. */
2277 thisloop->next = loop_stack;
2278 thisloop->all = nesting_stack;
2279 thisloop->depth = ++nesting_depth;
2280 thisloop->data.loop.start_label = gen_label_rtx ();
2281 thisloop->data.loop.end_label = gen_label_rtx ();
2282 thisloop->data.loop.alt_end_label = 0;
2283 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2284 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2285 loop_stack = thisloop;
2286 nesting_stack = thisloop;
2288 do_pending_stack_adjust ();
2289 emit_queue ();
2290 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2291 emit_label (thisloop->data.loop.start_label);
2293 return thisloop;
2296 /* Like expand_start_loop but for a loop where the continuation point
2297 (for expand_continue_loop) will be specified explicitly. */
2299 struct nesting *
2300 expand_start_loop_continue_elsewhere (exit_flag)
2301 int exit_flag;
2303 struct nesting *thisloop = expand_start_loop (exit_flag);
2304 loop_stack->data.loop.continue_label = gen_label_rtx ();
2305 return thisloop;
2308 /* Begin a null, aka do { } while (0) "loop". But since the contents
2309 of said loop can still contain a break, we must frob the loop nest. */
2311 struct nesting *
2312 expand_start_null_loop ()
2314 register struct nesting *thisloop = ALLOC_NESTING ();
2316 /* Make an entry on loop_stack for the loop we are entering. */
2318 thisloop->next = loop_stack;
2319 thisloop->all = nesting_stack;
2320 thisloop->depth = ++nesting_depth;
2321 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2322 thisloop->data.loop.end_label = gen_label_rtx ();
2323 thisloop->data.loop.alt_end_label = NULL_RTX;
2324 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2325 thisloop->exit_label = thisloop->data.loop.end_label;
2326 loop_stack = thisloop;
2327 nesting_stack = thisloop;
2329 return thisloop;
2332 /* Specify the continuation point for a loop started with
2333 expand_start_loop_continue_elsewhere.
2334 Use this at the point in the code to which a continue statement
2335 should jump. */
2337 void
2338 expand_loop_continue_here ()
2340 do_pending_stack_adjust ();
2341 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2342 emit_label (loop_stack->data.loop.continue_label);
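/* For illustration, a front end expanding `for (init; cond; incr) body'
   can use these routines roughly as follows (a sketch):

	... expand init ...
	expand_start_loop_continue_elsewhere (1);
	expand_exit_loop_if_false (0, cond);
	... expand body ...
	expand_loop_continue_here ();
	... expand incr ...
	expand_end_loop ();

   so that `continue' jumps to the increment step rather than to the
   top of the loop.  */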
2345 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2346 Pop the block off of loop_stack. */
2348 void
2349 expand_end_loop ()
2351 rtx start_label = loop_stack->data.loop.start_label;
2352 rtx insn = get_last_insn ();
2353 int needs_end_jump = 1;
2355 /* Mark the continue-point at the top of the loop if none elsewhere. */
2356 if (start_label == loop_stack->data.loop.continue_label)
2357 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2359 do_pending_stack_adjust ();
2361 /* If optimizing, perhaps reorder the loop.
2362 First, try to use a condjump near the end.
2363 expand_exit_loop_if_false ends loops with unconditional jumps,
2364 like this:
2366 if (test) goto label;
2367 optional: cleanup
2368 goto loop_stack->data.loop.end_label
2369 barrier
2370 label:
2372 If we find such a pattern, we can end the loop earlier. */
2374 if (optimize
2375 && GET_CODE (insn) == CODE_LABEL
2376 && LABEL_NAME (insn) == NULL
2377 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2379 rtx label = insn;
2380 rtx jump = PREV_INSN (PREV_INSN (label));
2382 if (GET_CODE (jump) == JUMP_INSN
2383 && GET_CODE (PATTERN (jump)) == SET
2384 && SET_DEST (PATTERN (jump)) == pc_rtx
2385 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2386 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2387 == loop_stack->data.loop.end_label))
2389 rtx prev;
2391 /* The test might be complex and reference LABEL multiple times,
2392 like the loop in loop_iterations to set vtop. To handle this,
2393 we move LABEL. */
2394 insn = PREV_INSN (label);
2395 reorder_insns (label, label, start_label);
2397 for (prev = PREV_INSN (jump);; prev = PREV_INSN (prev))
2399 /* We ignore line number notes, but if we see any other note,
2400 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2401 NOTE_INSN_LOOP_*, we disable this optimization. */
2402 if (GET_CODE (prev) == NOTE)
2404 if (NOTE_LINE_NUMBER (prev) < 0)
2405 break;
2406 continue;
2408 if (GET_CODE (prev) == CODE_LABEL)
2409 break;
2410 if (GET_CODE (prev) == JUMP_INSN)
2412 if (GET_CODE (PATTERN (prev)) == SET
2413 && SET_DEST (PATTERN (prev)) == pc_rtx
2414 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2415 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2416 == LABEL_REF)
2417 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2419 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2420 = start_label;
2421 emit_note_after (NOTE_INSN_LOOP_END, prev);
2422 needs_end_jump = 0;
2424 break;
2430 /* If the loop starts with a loop exit, roll that to the end where
2431 it will optimize together with the jump back.
2433 We look for the conditional branch to the exit, except that once
2434 we find such a branch, we don't look past 30 instructions.
2436 In more detail, if the loop presently looks like this (in pseudo-C):
2438 start_label:
2439 if (test) goto end_label;
2440 body;
2441 goto start_label;
2442 end_label:
2444 transform it to look like:
2446 goto start_label;
2447 newstart_label:
2448 body;
2449 start_label:
2450 if (test) goto end_label;
2451 goto newstart_label;
2452 end_label:
2454 Here, the `test' may actually consist of some reasonably complex
2455 code, terminating in a test. */
2457 if (optimize
2458 && needs_end_jump
2460 && ! (GET_CODE (insn) == JUMP_INSN
2461 && GET_CODE (PATTERN (insn)) == SET
2462 && SET_DEST (PATTERN (insn)) == pc_rtx
2463 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2465 int eh_regions = 0;
2466 int num_insns = 0;
2467 rtx last_test_insn = NULL_RTX;
2469 /* Scan insns from the top of the loop looking for a qualified
2470 conditional exit. */
2471 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2472 insn = NEXT_INSN (insn))
2474 if (GET_CODE (insn) == NOTE)
2476 if (optimize < 2
2477 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2478 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2479 /* The code that actually moves the exit test will
2480 carefully leave BLOCK notes in their original
2481 location. That means, however, that we can't debug
2482 the exit test itself. So, we refuse to move code
2483 containing BLOCK notes at low optimization levels. */
2484 break;
2486 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2487 ++eh_regions;
2488 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2490 --eh_regions;
2491 if (eh_regions < 0)
2492 /* We've come to the end of an EH region, but
2493 never saw the beginning of that region. That
2494 means that an EH region begins before the top
2495 of the loop, and ends in the middle of it. The
2496 existence of such a situation violates a basic
2497 assumption in this code, since that would imply
2498 that even when EH_REGIONS is zero, we might
2499 move code out of an exception region. */
2500 abort ();
2503 /* We must not walk into a nested loop. */
2504 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2505 break;
2507 /* We already know this INSN is a NOTE, so there's no
2508 point in looking at it to see if it's a JUMP. */
2509 continue;
2512 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2513 num_insns++;
2515 if (last_test_insn && num_insns > 30)
2516 break;
2518 if (eh_regions > 0)
2519 /* We don't want to move a partial EH region. Consider:
2521 while ( ( { try {
2522 if (cond ()) 0;
2523 else {
2524 bar();
2525 0;
2526 }
2527 } catch (...) {
2528 0;
2529 } )) {
2530 body;
2531 }
2533 This isn't legal C++, but here's what it's supposed to
2534 mean: if cond() is true, stop looping. Otherwise,
2535 call bar, and keep looping. In addition, if cond
2536 throws an exception, catch it and keep looping. Such
2537 constructs are certainly legal in LISP.
2539 We should not move the `if (cond()) 0' test since then
2540 the EH-region for the try-block would be broken up.
2541 (In this case we would move the EH_BEG note for the `try'
2542 and `if cond()' but not the call to bar() or the
2543 EH_END note.)
2545 So we don't look for tests within an EH region. */
2546 continue;
2548 if (GET_CODE (insn) == JUMP_INSN
2549 && GET_CODE (PATTERN (insn)) == SET
2550 && SET_DEST (PATTERN (insn)) == pc_rtx)
2552 /* This is indeed a jump. */
2553 rtx dest1 = NULL_RTX;
2554 rtx dest2 = NULL_RTX;
2555 rtx potential_last_test;
2556 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2558 /* A conditional jump. */
2559 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2560 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2561 potential_last_test = insn;
2563 else
2565 /* An unconditional jump. */
2566 dest1 = SET_SRC (PATTERN (insn));
2567 /* Include the BARRIER after the JUMP. */
2568 potential_last_test = NEXT_INSN (insn);
2571 do {
2572 if (dest1 && GET_CODE (dest1) == LABEL_REF
2573 && ((XEXP (dest1, 0)
2574 == loop_stack->data.loop.alt_end_label)
2575 || (XEXP (dest1, 0)
2576 == loop_stack->data.loop.end_label)))
2578 last_test_insn = potential_last_test;
2579 break;
2582 /* If this was a conditional jump, there may be
2583 another label at which we should look. */
2584 dest1 = dest2;
2585 dest2 = NULL_RTX;
2586 } while (dest1);
2590 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2592 /* We found one. Move everything from there up
2593 to the end of the loop, and add a jump into the loop
2594 to jump to there. */
2595 register rtx newstart_label = gen_label_rtx ();
2596 register rtx start_move = start_label;
2597 rtx next_insn;
2599 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2600 then we want to move this note also. */
2601 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2602 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2603 == NOTE_INSN_LOOP_CONT))
2604 start_move = PREV_INSN (start_move);
2606 emit_label_after (newstart_label, PREV_INSN (start_move));
2608 /* Actually move the insns. Start at the beginning, and
2609 keep moving insns until we've moved the
2610 last_test_insn. */
2611 for (insn = start_move; insn; insn = next_insn)
2613 /* Figure out which insn comes after this one. We have
2614 to do this before we move INSN. */
2615 if (insn == last_test_insn)
2616 /* We've moved all the insns. */
2617 next_insn = NULL_RTX;
2618 else
2619 next_insn = NEXT_INSN (insn);
2621 if (GET_CODE (insn) == NOTE
2622 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2623 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2624 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2625 NOTE_INSN_BLOCK_ENDs because the correct generation
2626 of debugging information depends on these appearing
2627 in the same order in the RTL and in the tree
2628 structure, where they are represented as BLOCKs.
2629 So, we don't move block notes. Of course, moving
2630 the code inside the block is likely to make it
2631 impossible to debug the instructions in the exit
2632 test, but such is the price of optimization. */
2633 continue;
2635 /* Move the INSN. */
2636 reorder_insns (insn, insn, get_last_insn ());
2639 emit_jump_insn_after (gen_jump (start_label),
2640 PREV_INSN (newstart_label));
2641 emit_barrier_after (PREV_INSN (newstart_label));
2642 start_label = newstart_label;
2646 if (needs_end_jump)
2648 emit_jump (start_label);
2649 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2651 emit_label (loop_stack->data.loop.end_label);
2653 POPSTACK (loop_stack);
2655 last_expr_type = 0;
2658 /* Finish a null loop, aka do { } while (0). */
2660 void
2661 expand_end_null_loop ()
2663 do_pending_stack_adjust ();
2664 emit_label (loop_stack->data.loop.end_label);
2666 POPSTACK (loop_stack);
2668 last_expr_type = 0;
2671 /* Generate a jump to the current loop's continue-point.
2672 This is usually the top of the loop, but may be specified
2673 explicitly elsewhere. If not currently inside a loop,
2674 return 0 and do nothing; caller will print an error message. */
2677 expand_continue_loop (whichloop)
2678 struct nesting *whichloop;
2680 last_expr_type = 0;
2681 if (whichloop == 0)
2682 whichloop = loop_stack;
2683 if (whichloop == 0)
2684 return 0;
2685 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2686 NULL_RTX);
2687 return 1;
2690 /* Generate a jump to exit the current loop. If not currently inside a loop,
2691 return 0 and do nothing; caller will print an error message. */
2694 expand_exit_loop (whichloop)
2695 struct nesting *whichloop;
2697 last_expr_type = 0;
2698 if (whichloop == 0)
2699 whichloop = loop_stack;
2700 if (whichloop == 0)
2701 return 0;
2702 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2703 return 1;
2706 /* Generate a conditional jump to exit the current loop if COND
2707 evaluates to zero. If not currently inside a loop,
2708 return 0 and do nothing; caller will print an error message. */
2711 expand_exit_loop_if_false (whichloop, cond)
2712 struct nesting *whichloop;
2713 tree cond;
2715 rtx label = gen_label_rtx ();
2716 rtx last_insn;
2717 last_expr_type = 0;
2719 if (whichloop == 0)
2720 whichloop = loop_stack;
2721 if (whichloop == 0)
2722 return 0;
2723 /* In order to handle fixups, we actually create a conditional jump
2724 around an unconditional branch to exit the loop. If fixups are
2725 necessary, they go before the unconditional branch. */
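/* That is, the sequence emitted below is roughly

	if (cond) goto label;
	... fixups, if any ...
	goto end_label;
	label:
 */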
2727 do_jump (cond, NULL_RTX, label);
2728 last_insn = get_last_insn ();
2729 if (GET_CODE (last_insn) == CODE_LABEL)
2730 whichloop->data.loop.alt_end_label = last_insn;
2731 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2732 NULL_RTX);
2733 emit_label (label);
2735 return 1;
2738 /* Return nonzero if the loop nest is empty. Else return zero. */
2741 stmt_loop_nest_empty ()
2743 /* cfun->stmt can be NULL if we are building a call to get the
2744 EH context for a setjmp/longjmp EH target and the current
2745 function was a deferred inline function. */
2746 return (cfun->stmt == NULL || loop_stack == NULL);
2749 /* Return non-zero if we should preserve sub-expressions as separate
2750 pseudos. We never do so if we aren't optimizing. We always do so
2751 if -fexpensive-optimizations.
2753 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2754 the loop may still be a small one. */
2757 preserve_subexpressions_p ()
2759 rtx insn;
2761 if (flag_expensive_optimizations)
2762 return 1;
2764 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2765 return 0;
2767 insn = get_last_insn_anywhere ();
2769 return (insn
2770 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2771 < n_non_fixed_regs * 3));
2775 /* Generate a jump to exit the current loop, conditional, binding contour
2776 or case statement. Not all such constructs are visible to this function,
2777 only those started with EXIT_FLAG nonzero. Individual languages use
2778 the EXIT_FLAG parameter to control which kinds of constructs you can
2779 exit this way.
2781 If not currently inside anything that can be exited,
2782 return 0 and do nothing; caller will print an error message. */
2785 expand_exit_something ()
2787 struct nesting *n;
2788 last_expr_type = 0;
2789 for (n = nesting_stack; n; n = n->all)
2790 if (n->exit_label != 0)
2792 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2793 return 1;
2796 return 0;
2799 /* Generate RTL to return from the current function, with no value.
2800 (That is, we do not do anything about returning any value.) */
2802 void
2803 expand_null_return ()
2805 struct nesting *block = block_stack;
2806 rtx last_insn = get_last_insn ();
2808 /* If this function was declared to return a value, but we
2809 didn't, clobber the return registers so that they are not
2810 propagated live to the rest of the function. */
2811 clobber_return_register ();
2813 /* Does any pending block have cleanups? */
2814 while (block && block->data.block.cleanups == 0)
2815 block = block->next;
2817 /* If yes, use a goto to return, since that runs cleanups. */
2819 expand_null_return_1 (last_insn, block != 0);
2822 /* Generate RTL to return from the current function, with value VAL. */
2824 static void
2825 expand_value_return (val)
2826 rtx val;
2828 struct nesting *block = block_stack;
2829 rtx last_insn = get_last_insn ();
2830 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2832 /* Copy the value to the return location
2833 unless it's already there. */
2835 if (return_reg != val)
2837 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2838 #ifdef PROMOTE_FUNCTION_RETURN
2839 int unsignedp = TREE_UNSIGNED (type);
2840 enum machine_mode old_mode
2841 = DECL_MODE (DECL_RESULT (current_function_decl));
2842 enum machine_mode mode
2843 = promote_mode (type, old_mode, &unsignedp, 1);
2845 if (mode != old_mode)
2846 val = convert_modes (mode, old_mode, val, unsignedp);
2847 #endif
2848 if (GET_CODE (return_reg) == PARALLEL)
2849 emit_group_load (return_reg, val, int_size_in_bytes (type),
2850 TYPE_ALIGN (type));
2851 else
2852 emit_move_insn (return_reg, val);
2855 /* Does any pending block have cleanups? */
2857 while (block && block->data.block.cleanups == 0)
2858 block = block->next;
2860 /* If yes, use a goto to return, since that runs cleanups.
2861 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2863 expand_null_return_1 (last_insn, block != 0);
2866 /* Output a return with no value. If LAST_INSN is nonzero,
2867 pretend that the return takes place after LAST_INSN.
2868 If USE_GOTO is nonzero then don't use a return instruction;
2869 go to the return label instead. This causes any cleanups
2870 of pending blocks to be executed normally. */
2872 static void
2873 expand_null_return_1 (last_insn, use_goto)
2874 rtx last_insn;
2875 int use_goto;
2877 rtx end_label = cleanup_label ? cleanup_label : return_label;
2879 clear_pending_stack_adjust ();
2880 do_pending_stack_adjust ();
2881 last_expr_type = 0;
2883 /* PCC-struct return always uses an epilogue. */
2884 if (current_function_returns_pcc_struct || use_goto)
2886 if (end_label == 0)
2887 end_label = return_label = gen_label_rtx ();
2888 expand_goto_internal (NULL_TREE, end_label, last_insn);
2889 return;
2892 /* Otherwise output a simple return-insn if one is available,
2893 unless it won't do the job. */
2894 #ifdef HAVE_return
2895 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2897 emit_jump_insn (gen_return ());
2898 emit_barrier ();
2899 return;
2901 #endif
2903 /* Otherwise jump to the epilogue. */
2904 expand_goto_internal (NULL_TREE, end_label, last_insn);
2907 /* Generate RTL to evaluate the expression RETVAL and return it
2908 from the current function. */
2910 void
2911 expand_return (retval)
2912 tree retval;
2914 /* If there are any cleanups to be performed, then they will
2915 be inserted following LAST_INSN. It is desirable
2916 that the last_insn, for such purposes, should be the
2917 last insn before computing the return value. Otherwise, cleanups
2918 which call functions can clobber the return value. */
2919 /* ??? rms: I think that is erroneous, because in C++ it would
2920 run destructors on variables that might be used in the subsequent
2921 computation of the return value. */
2922 rtx last_insn = 0;
2923 rtx result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
2924 register rtx val = 0;
2925 tree retval_rhs;
2926 int cleanups;
2928 /* If function wants no value, give it none. */
2929 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2931 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2932 emit_queue ();
2933 expand_null_return ();
2934 return;
2937 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2938 /* This is not sufficient. We also need to watch for cleanups of the
2939 expression we are about to expand. Unfortunately, we cannot know
2940 if it has cleanups until we expand it, and we want to change how we
2941 expand it depending upon if we need cleanups. We can't win. */
2942 #if 0
2943 cleanups = any_pending_cleanups (1);
2944 #else
2945 cleanups = 1;
2946 #endif
2948 if (retval == error_mark_node)
2949 retval_rhs = NULL_TREE;
2950 else if (TREE_CODE (retval) == RESULT_DECL)
2951 retval_rhs = retval;
2952 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2953 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2954 retval_rhs = TREE_OPERAND (retval, 1);
2955 else if (VOID_TYPE_P (TREE_TYPE (retval)))
2956 /* Recognize tail-recursive call to void function. */
2957 retval_rhs = retval;
2958 else
2959 retval_rhs = NULL_TREE;
2961 /* Only use `last_insn' if there are cleanups which must be run. */
2962 if (cleanups || cleanup_label != 0)
2963 last_insn = get_last_insn ();
2965 /* Distribute return down conditional expr if either of the sides
2966 may involve tail recursion (see test below). This enhances the number
2967 of tail recursions we see. Don't do this always since it can produce
2968 sub-optimal code in some cases, and because we already distribute
2969 assignments into conditional expressions when that would help. */
2971 if (optimize && retval_rhs != 0
2972 && frame_offset == 0
2973 && TREE_CODE (retval_rhs) == COND_EXPR
2974 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2975 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2977 rtx label = gen_label_rtx ();
2978 tree expr;
2980 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2981 start_cleanup_deferral ();
2982 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2983 DECL_RESULT (current_function_decl),
2984 TREE_OPERAND (retval_rhs, 1));
2985 TREE_SIDE_EFFECTS (expr) = 1;
2986 expand_return (expr);
2987 emit_label (label);
2989 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2990 DECL_RESULT (current_function_decl),
2991 TREE_OPERAND (retval_rhs, 2));
2992 TREE_SIDE_EFFECTS (expr) = 1;
2993 expand_return (expr);
2994 end_cleanup_deferral ();
2995 return;
2998 /* If the result is an aggregate that is being returned in one (or more)
2999 registers, load the registers here. The compiler currently can't handle
3000 copying a BLKmode value into registers. We could put this code in a
3001 more general area (for use by everyone instead of just function
3002 call/return), but until this feature is generally usable it is kept here
3003 (and in expand_call). The value must go into a pseudo in case there
3004 are cleanups that will clobber the real return register. */
3006 if (retval_rhs != 0
3007 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3008 && GET_CODE (result_rtl) == REG)
3010 int i;
3011 unsigned HOST_WIDE_INT bitpos, xbitpos;
3012 unsigned HOST_WIDE_INT big_endian_correction = 0;
3013 unsigned HOST_WIDE_INT bytes
3014 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3015 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3016 unsigned int bitsize
3017 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3018 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3019 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3020 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3021 enum machine_mode tmpmode, result_reg_mode;
3023 if (bytes == 0)
3025 expand_null_return ();
3026 return;
3029 /* Structures whose size is not a multiple of a word are aligned
3030 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3031 machine, this means we must skip the empty high order bytes when
3032 calculating the bit offset. */
3033 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
3034 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3035 * BITS_PER_UNIT));
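/* For example, with 32-bit words and 8-bit units, a 5-byte structure
   gives big_endian_correction = 32 - (5 % 4) * 8 = 24, so the copy
   below starts 24 bits into the first destination word, skipping the
   three empty high-order bytes.  */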
3037 /* Copy the structure BITSIZE bits at a time. */
3038 for (bitpos = 0, xbitpos = big_endian_correction;
3039 bitpos < bytes * BITS_PER_UNIT;
3040 bitpos += bitsize, xbitpos += bitsize)
3042 /* We need a new destination pseudo each time xbitpos is
3043 on a word boundary and when xbitpos == big_endian_correction
3044 (the first time through). */
3045 if (xbitpos % BITS_PER_WORD == 0
3046 || xbitpos == big_endian_correction)
3048 /* Generate an appropriate register. */
3049 dst = gen_reg_rtx (word_mode);
3050 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3052 /* Clobber the destination before we move anything into it. */
3053 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
3056 /* We need a new source operand each time bitpos is on a word
3057 boundary. */
3058 if (bitpos % BITS_PER_WORD == 0)
3059 src = operand_subword_force (result_val,
3060 bitpos / BITS_PER_WORD,
3061 BLKmode);
3063 /* Use bitpos for the source extraction (left justified) and
3064 xbitpos for the destination store (right justified). */
3065 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3066 extract_bit_field (src, bitsize,
3067 bitpos % BITS_PER_WORD, 1,
3068 NULL_RTX, word_mode, word_mode,
3069 bitsize, BITS_PER_WORD),
3070 bitsize, BITS_PER_WORD);
3073 /* Find the smallest integer mode large enough to hold the
3074 entire structure and use that mode instead of BLKmode
3075 on the USE insn for the return register. */
3076 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3077 tmpmode != VOIDmode;
3078 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3079 /* Have we found a large enough mode? */
3080 if (GET_MODE_SIZE (tmpmode) >= bytes)
3081 break;
3083 /* No suitable mode found. */
3084 if (tmpmode == VOIDmode)
3085 abort ();
3087 PUT_MODE (result_rtl, tmpmode);
3089 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3090 result_reg_mode = word_mode;
3091 else
3092 result_reg_mode = tmpmode;
3093 result_reg = gen_reg_rtx (result_reg_mode);
3095 emit_queue ();
3096 for (i = 0; i < n_regs; i++)
3097 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3098 result_pseudos[i]);
3100 if (tmpmode != result_reg_mode)
3101 result_reg = gen_lowpart (tmpmode, result_reg);
3103 expand_value_return (result_reg);
3105 else if (cleanups
3106 && retval_rhs != 0
3107 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3108 && (GET_CODE (result_rtl) == REG
3109 || (GET_CODE (result_rtl) == PARALLEL)))
3111 /* Calculate the return value into a temporary (usually a pseudo
3112 reg). */
3113 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3114 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3116 val = assign_temp (nt, 0, 0, 1);
3117 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3118 val = force_not_mem (val);
3119 emit_queue ();
3120 /* Return the calculated value, doing cleanups first. */
3121 expand_value_return (val);
3123 else
3125 /* No cleanups or no hard reg used;
3126 calculate value into hard return reg. */
3127 expand_expr (retval, const0_rtx, VOIDmode, 0);
3128 emit_queue ();
3129 expand_value_return (result_rtl);
3133 /* Return 1 if the end of the generated RTX is not a barrier.
3134 This means code already compiled can drop through. */
3137 drop_through_at_end_p ()
3139 rtx insn = get_last_insn ();
3140 while (insn && GET_CODE (insn) == NOTE)
3141 insn = PREV_INSN (insn);
3142 return insn && GET_CODE (insn) != BARRIER;
3145 /* Attempt to optimize a potential tail recursion call into a goto.
3146 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3147 where to place the jump to the tail recursion label.
3149 Return TRUE if the call was optimized into a goto. */
3152 optimize_tail_recursion (arguments, last_insn)
3153 tree arguments;
3154 rtx last_insn;
3156 /* Finish checking validity, and if valid emit code to set the
3157 argument variables for the new call. */
3158 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3160 if (tail_recursion_label == 0)
3162 tail_recursion_label = gen_label_rtx ();
3163 emit_label_after (tail_recursion_label,
3164 tail_recursion_reentry);
3166 emit_queue ();
3167 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3168 emit_barrier ();
3169 return 1;
3171 return 0;
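/* For illustration (a sketch): given

	int sum (n, acc) int n, acc;
	{ return n ? sum (n - 1, acc + n) : acc; }

   the tail call can be compiled as stores into the formals followed
   by a jump:

	t1 = n - 1; t2 = acc + n;
	n = t1; acc = t2;
	goto tail_recursion_label;

   tail_recursion_args below emits those stores, evaluating every
   actual before any formal is changed.  */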
3174 /* Emit code to alter this function's formal parms for a tail-recursive call.
3175 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3176 FORMALS is the chain of decls of formals.
3177 Return 1 if this can be done;
3178 otherwise return 0 and do not emit any code. */
3180 static int
3181 tail_recursion_args (actuals, formals)
3182 tree actuals, formals;
3184 register tree a = actuals, f = formals;
3185 register int i;
3186 register rtx *argvec;
3188 /* Check that number and types of actuals are compatible
3189 with the formals. This is not always true in valid C code.
3190 Also check that no formal needs to be addressable
3191 and that all formals are scalars. */
3193 /* Also count the args. */
3195 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3197 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3198 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3199 return 0;
3200 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3201 return 0;
3203 if (a != 0 || f != 0)
3204 return 0;
3206 /* Compute all the actuals. */
3208 argvec = (rtx *) alloca (i * sizeof (rtx));
3210 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3211 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3213 /* Find which actual values refer to current values of previous formals.
3214 Copy each of them now, before any formal is changed. */
3216 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3218 int copy = 0;
3219 register int j;
3220 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3221 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3223 copy = 1;
3224 break;
3226 if (copy)
3227 argvec[i] = copy_to_reg (argvec[i]);
3230 /* Store the values of the actuals into the formals. */
3232 for (f = formals, a = actuals, i = 0; f;
3233 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3235 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3236 emit_move_insn (DECL_RTL (f), argvec[i]);
3237 else
3238 convert_move (DECL_RTL (f), argvec[i],
3239 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3242 free_temp_slots ();
3243 return 1;
3246 /* Generate the RTL code for entering a binding contour.
3247 The variables are declared one by one, by calls to `expand_decl'.
3249 FLAGS is a bitwise or of the following flags:
3251 1 - Nonzero if this construct should be visible to
3252 `exit_something'.
3254 2 - Nonzero if this contour does not require a
3255 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3256 language-independent code should set this flag because they
3257 will not create corresponding BLOCK nodes. (There should be
3258 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3259 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3260 when expand_end_bindings is called.
3262 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3263 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3264 note. */
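/* For illustration: language-independent code typically calls
   expand_start_bindings (2), i.e. not exitable and no
   NOTE_INSN_BLOCK_BEG note (as expand_start_target_temps does below),
   while a front end expanding a user-visible { ... } scope would pass
   flags of 0 together with the scope's BLOCK node.  */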
3266 void
3267 expand_start_bindings_and_block (flags, block)
3268 int flags;
3269 tree block;
3271 struct nesting *thisblock = ALLOC_NESTING ();
3272 rtx note;
3273 int exit_flag = ((flags & 1) != 0);
3274 int block_flag = ((flags & 2) == 0);
3276 /* If a BLOCK is supplied, then the caller should be requesting a
3277 NOTE_INSN_BLOCK_BEG note. */
3278 if (!block_flag && block)
3279 abort ();
3281 /* Create a note to mark the beginning of the block. */
3282 if (block_flag)
3284 note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3285 NOTE_BLOCK (note) = block;
3287 else
3288 note = emit_note (NULL_PTR, NOTE_INSN_DELETED);
3290 /* Make an entry on block_stack for the block we are entering. */
3292 thisblock->next = block_stack;
3293 thisblock->all = nesting_stack;
3294 thisblock->depth = ++nesting_depth;
3295 thisblock->data.block.stack_level = 0;
3296 thisblock->data.block.cleanups = 0;
3297 thisblock->data.block.n_function_calls = 0;
3298 thisblock->data.block.exception_region = 0;
3299 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3301 thisblock->data.block.conditional_code = 0;
3302 thisblock->data.block.last_unconditional_cleanup = note;
3303 /* When we insert instructions after the last unconditional cleanup,
3304 we don't adjust last_insn. That means that a later add_insn will
3305 clobber the instructions we've just added. The easiest way to
3306 fix this is to just insert another instruction here, so that the
3307 instructions inserted after the last unconditional cleanup are
3308 never the last instruction. */
3309 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3310 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3312 if (block_stack
3313 && !(block_stack->data.block.cleanups == NULL_TREE
3314 && block_stack->data.block.outer_cleanups == NULL_TREE))
3315 thisblock->data.block.outer_cleanups
3316 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3317 block_stack->data.block.outer_cleanups);
3318 else
3319 thisblock->data.block.outer_cleanups = 0;
3320 thisblock->data.block.label_chain = 0;
3321 thisblock->data.block.innermost_stack_block = stack_block_stack;
3322 thisblock->data.block.first_insn = note;
3323 thisblock->data.block.block_start_count = ++current_block_start_count;
3324 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3325 block_stack = thisblock;
3326 nesting_stack = thisblock;
3328 /* Make a new level for allocating stack slots. */
3329 push_temp_slots ();
3332 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3333 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3334 expand_expr are made. After we end the region, we know that all
3335 space for all temporaries that were created by TARGET_EXPRs will be
3336 destroyed and their space freed for reuse. */
3338 void
3339 expand_start_target_temps ()
3341 /* This is so that even if the result is preserved, the space
3342 allocated will be freed, as we know that it is no longer in use. */
3343 push_temp_slots ();
3345 /* Start a new binding layer that will keep track of all cleanup
3346 actions to be performed. */
3347 expand_start_bindings (2);
3349 target_temp_slot_level = temp_slot_level;
3352 void
3353 expand_end_target_temps ()
3355 expand_end_bindings (NULL_TREE, 0, 0);
3357 /* This is so that even if the result is preserved, the space
3358 allocated will be freed, as we know that it is no longer in use. */
3359 pop_temp_slots ();
3362 /* Given a pointer to a BLOCK node return non-zero if (and only if) the node
3363 in question represents the outermost pair of curly braces (i.e. the "body
3364 block") of a function or method.
3366 For any BLOCK node representing a "body block" of a function or method, the
3367 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3368 represents the outermost (function) scope for the function or method (i.e.
3369 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3370 *that* node in turn will point to the relevant FUNCTION_DECL node. */
3373 is_body_block (stmt)
3374 register tree stmt;
3376 if (TREE_CODE (stmt) == BLOCK)
3378 tree parent = BLOCK_SUPERCONTEXT (stmt);
3380 if (parent && TREE_CODE (parent) == BLOCK)
3382 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3384 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3385 return 1;
3389 return 0;
3392 /* Mark top block of block_stack as an implicit binding for an
3393 exception region. This is used to prevent infinite recursion when
3394 ending a binding with expand_end_bindings. It is only ever called
3395 by expand_eh_region_start, as that is the only way to create a
3396 block stack for an exception region. */
3398 void
3399 mark_block_as_eh_region ()
3401 block_stack->data.block.exception_region = 1;
3402 if (block_stack->next
3403 && block_stack->next->data.block.conditional_code)
3405 block_stack->data.block.conditional_code
3406 = block_stack->next->data.block.conditional_code;
3407 block_stack->data.block.last_unconditional_cleanup
3408 = block_stack->next->data.block.last_unconditional_cleanup;
3409 block_stack->data.block.cleanup_ptr
3410 = block_stack->next->data.block.cleanup_ptr;
3414 /* True if we are currently emitting insns in an area of output code
3415 that is controlled by a conditional expression. This is used by
3416 the cleanup handling code to generate conditional cleanup actions. */
3419 conditional_context ()
3421 return block_stack && block_stack->data.block.conditional_code;
3424 /* Mark top block of block_stack as not for an implicit binding for an
3425 exception region. This is only ever done by expand_eh_region_end
3426 to let expand_end_bindings know that it is being called explicitly
3427 to end the binding layer for just the binding layer associated with
3428 the exception region; otherwise expand_end_bindings would try to
3429 end all implicit binding layers for exception regions, and then
3430 one normal binding layer. */
3432 void
3433 mark_block_as_not_eh_region ()
3435 block_stack->data.block.exception_region = 0;
3438 /* True if the top block of block_stack was marked as for an exception
3439 region by mark_block_as_eh_region. */
3442 is_eh_region ()
3444 return cfun && block_stack && block_stack->data.block.exception_region;
3447 /* Emit a handler label for a nonlocal goto handler.
3448 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3450 static rtx
3451 expand_nl_handler_label (slot, before_insn)
3452 rtx slot, before_insn;
3454 rtx insns;
3455 rtx handler_label = gen_label_rtx ();
3457 /* Don't let jump_optimize delete the handler. */
3458 LABEL_PRESERVE_P (handler_label) = 1;
3460 start_sequence ();
3461 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3462 insns = get_insns ();
3463 end_sequence ();
3464 emit_insns_before (insns, before_insn);
3466 emit_label (handler_label);
3468 return handler_label;
3471 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3472 handler. */
3473 static void
3474 expand_nl_goto_receiver ()
3476 #ifdef HAVE_nonlocal_goto
3477 if (! HAVE_nonlocal_goto)
3478 #endif
3479 /* First adjust our frame pointer to its actual value. It was
3480 previously set to the start of the virtual area corresponding to
3481 the stacked variables when we branched here and now needs to be
3482 adjusted to the actual hardware fp value.
3484 Assignments to virtual registers are converted by
3485 instantiate_virtual_regs into the corresponding assignment
3486 to the underlying register (fp in this case) that makes
3487 the original assignment true.
3488 So the following insn will actually be
3489 decrementing fp by STARTING_FRAME_OFFSET. */
3490 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3492 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3493 if (fixed_regs[ARG_POINTER_REGNUM])
3495 #ifdef ELIMINABLE_REGS
3496 /* If the argument pointer can be eliminated in favor of the
3497 frame pointer, we don't need to restore it. We assume here
3498 that if such an elimination is present, it can always be used.
3499 This is the case on all known machines; if we don't make this
3500 assumption, we do unnecessary saving on many machines. */
3501 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3502 size_t i;
3504 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3505 if (elim_regs[i].from == ARG_POINTER_REGNUM
3506 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3507 break;
3509 if (i == ARRAY_SIZE (elim_regs))
3510 #endif
3512 /* Now restore our arg pointer from the address at which it
3513 was saved in our stack frame.
3514 If there hasn't been space allocated for it yet, make
3515 some now. */
3516 if (arg_pointer_save_area == 0)
3517 arg_pointer_save_area
3518 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3519 emit_move_insn (virtual_incoming_args_rtx,
3520 /* We need a pseudo here, or else
3521 instantiate_virtual_regs_1 complains. */
3522 copy_to_reg (arg_pointer_save_area));
3525 #endif
3527 #ifdef HAVE_nonlocal_goto_receiver
3528 if (HAVE_nonlocal_goto_receiver)
3529 emit_insn (gen_nonlocal_goto_receiver ());
3530 #endif
3533 /* Make handlers for nonlocal gotos taking place in the function calls in
3534 block THISBLOCK. */
3536 static void
3537 expand_nl_goto_receivers (thisblock)
3538 struct nesting *thisblock;
3540 tree link;
3541 rtx afterward = gen_label_rtx ();
3542 rtx insns, slot;
3543 rtx label_list;
3544 int any_invalid;
3546 /* Record the handler address in the stack slot for that purpose,
3547 during this block, saving and restoring the outer value. */
3548 if (thisblock->next != 0)
3549 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3551 rtx save_receiver = gen_reg_rtx (Pmode);
3552 emit_move_insn (XEXP (slot, 0), save_receiver);
3554 start_sequence ();
3555 emit_move_insn (save_receiver, XEXP (slot, 0));
3556 insns = get_insns ();
3557 end_sequence ();
3558 emit_insns_before (insns, thisblock->data.block.first_insn);
3561 /* Jump around the handlers; they run only when specially invoked. */
3562 emit_jump (afterward);
3564 /* Make a separate handler for each label. */
3565 link = nonlocal_labels;
3566 slot = nonlocal_goto_handler_slots;
3567 label_list = NULL_RTX;
3568 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3569 /* Skip any labels we shouldn't be able to jump to from here;
3570 we generate one special handler for all of them below, which just
3571 calls abort. */
3572 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3574 rtx lab;
3575 lab = expand_nl_handler_label (XEXP (slot, 0),
3576 thisblock->data.block.first_insn);
3577 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3579 expand_nl_goto_receiver ();
3581 /* Jump to the "real" nonlocal label. */
3582 expand_goto (TREE_VALUE (link));
3585 /* A second pass over all nonlocal labels; this time we handle those
3586 we should not be able to jump to at this point. */
3587 link = nonlocal_labels;
3588 slot = nonlocal_goto_handler_slots;
3589 any_invalid = 0;
3590 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3591 if (DECL_TOO_LATE (TREE_VALUE (link)))
3593 rtx lab;
3594 lab = expand_nl_handler_label (XEXP (slot, 0),
3595 thisblock->data.block.first_insn);
3596 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3597 any_invalid = 1;
3600 if (any_invalid)
3602 expand_nl_goto_receiver ();
3603 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3604 VOIDmode, 0);
3605 emit_barrier ();
3608 nonlocal_goto_handler_labels = label_list;
3609 emit_label (afterward);
3612 /* Warn about any unused VARS (which may contain nodes other than
3613 VAR_DECLs, but such nodes are ignored). The nodes are connected
3614 via the TREE_CHAIN field. */
3616 void
3617 warn_about_unused_variables (vars)
3618 tree vars;
3620 tree decl;
3622 if (warn_unused_variable)
3623 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3624 if (TREE_CODE (decl) == VAR_DECL
3625 && ! TREE_USED (decl)
3626 && ! DECL_IN_SYSTEM_HEADER (decl)
3627 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3628 warning_with_decl (decl, "unused variable `%s'");
3631 /* Generate RTL code to terminate a binding contour.
3633 VARS is the chain of VAR_DECL nodes for the variables bound in this
3634 contour. There may actually be other nodes in this chain, but any
3635 nodes other than VAR_DECLS are ignored.
3637 MARK_ENDS is nonzero if we should put a note at the beginning
3638 and end of this binding contour.
3640 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3641 (That is true automatically if the contour has a saved stack level.) */
3643 void
3644 expand_end_bindings (vars, mark_ends, dont_jump_in)
3645 tree vars;
3646 int mark_ends;
3647 int dont_jump_in;
3648 {
3649 register struct nesting *thisblock;
3651 while (block_stack->data.block.exception_region)
3652 {
3653 /* Because we don't need or want a new temporary level and
3654 because we didn't create one in expand_eh_region_start,
3655 create a fake one now to avoid removing one in
3656 expand_end_bindings. */
3657 push_temp_slots ();
3659 block_stack->data.block.exception_region = 0;
3661 expand_end_bindings (NULL_TREE, 0, 0);
3662 }
3664 /* Since expand_eh_region_start does an expand_start_bindings, we
3665 have to first end all the bindings that were created by
3666 expand_eh_region_start. */
3668 thisblock = block_stack;
3670 /* If any of the variables in this scope were not used, warn the
3671 user. */
3672 warn_about_unused_variables (vars);
3674 if (thisblock->exit_label)
3675 {
3676 do_pending_stack_adjust ();
3677 emit_label (thisblock->exit_label);
3678 }
3680 /* If necessary, make handlers for nonlocal gotos taking
3681 place in the function calls in this block. */
3682 if (function_call_count != thisblock->data.block.n_function_calls
3683 && nonlocal_labels
3684 /* Make handler for outermost block
3685 if there were any nonlocal gotos to this function. */
3686 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3687 /* Make handler for inner block if it has something
3688 special to do when you jump out of it. */
3689 : (thisblock->data.block.cleanups != 0
3690 || thisblock->data.block.stack_level != 0)))
3691 expand_nl_goto_receivers (thisblock);
3693 /* Don't allow jumping into a block that has a stack level.
3694 Cleanups are allowed, though. */
3695 if (dont_jump_in
3696 || thisblock->data.block.stack_level != 0)
3697 {
3698 struct label_chain *chain;
3700 /* Any labels in this block are no longer valid to go to.
3701 Mark them to cause an error message. */
3702 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3703 {
3704 DECL_TOO_LATE (chain->label) = 1;
3705 /* If any goto without a fixup came to this label,
3706 that must be an error, because gotos without fixups
3707 come from outside all saved stack-levels. */
3708 if (TREE_ADDRESSABLE (chain->label))
3709 error_with_decl (chain->label,
3710 "label `%s' used before containing binding contour");
3714 /* Restore stack level in effect before the block
3715 (only if variable-size objects allocated). */
3716 /* Perform any cleanups associated with the block. */
3718 if (thisblock->data.block.stack_level != 0
3719 || thisblock->data.block.cleanups != 0)
3720 {
3721 int reachable;
3722 rtx insn;
3724 /* Don't let cleanups affect ({...}) constructs. */
3725 int old_expr_stmts_for_value = expr_stmts_for_value;
3726 rtx old_last_expr_value = last_expr_value;
3727 tree old_last_expr_type = last_expr_type;
3728 expr_stmts_for_value = 0;
3730 /* Only clean up here if this point can actually be reached. */
3731 insn = get_last_insn ();
3732 if (GET_CODE (insn) == NOTE)
3733 insn = prev_nonnote_insn (insn);
3734 reachable = (! insn || GET_CODE (insn) != BARRIER);
3736 /* Do the cleanups. */
3737 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3738 if (reachable)
3739 do_pending_stack_adjust ();
3741 expr_stmts_for_value = old_expr_stmts_for_value;
3742 last_expr_value = old_last_expr_value;
3743 last_expr_type = old_last_expr_type;
3745 /* Restore the stack level. */
3747 if (reachable && thisblock->data.block.stack_level != 0)
3748 {
3749 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3750 thisblock->data.block.stack_level, NULL_RTX);
3751 if (nonlocal_goto_handler_slots != 0)
3752 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3753 NULL_RTX);
3754 }
3756 /* Any gotos out of this block must also do these things.
3757 Also report any gotos with fixups that came to labels in this
3758 level. */
3759 fixup_gotos (thisblock,
3760 thisblock->data.block.stack_level,
3761 thisblock->data.block.cleanups,
3762 thisblock->data.block.first_insn,
3763 dont_jump_in);
3764 }
3766 /* Mark the beginning and end of the scope if requested.
3767 We do this now, after running cleanups on the variables
3768 just going out of scope, so they are in scope for their cleanups. */
3770 if (mark_ends)
3771 {
3772 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3773 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3774 }
3775 else
3776 /* Get rid of the beginning-mark if we don't make an end-mark. */
3777 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3779 /* Restore the temporary level of TARGET_EXPRs. */
3780 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3782 /* Restore block_stack level for containing block. */
3784 stack_block_stack = thisblock->data.block.innermost_stack_block;
3785 POPSTACK (block_stack);
3787 /* Pop the stack slot nesting and free any slots at this level. */
3788 pop_temp_slots ();
3789 }
3791 /* Generate code to save the stack pointer at the start of the current block
3792 and set up to restore it on exit. */
3794 void
3795 save_stack_pointer ()
3796 {
3797 struct nesting *thisblock = block_stack;
3799 if (thisblock->data.block.stack_level == 0)
3800 {
3801 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3802 &thisblock->data.block.stack_level,
3803 thisblock->data.block.first_insn);
3804 stack_block_stack = thisblock;
3805 }
3806 }
3808 /* Generate RTL for the automatic variable declaration DECL.
3809 (Other kinds of declarations are simply ignored if seen here.) */
3811 void
3812 expand_decl (decl)
3813 register tree decl;
3814 {
3815 struct nesting *thisblock;
3816 tree type;
3818 type = TREE_TYPE (decl);
3820 /* Only automatic variables need any expansion done.
3821 Static and external variables, and external functions,
3822 will be handled by `assemble_variable' (called from finish_decl).
3823 TYPE_DECL and CONST_DECL require nothing.
3824 PARM_DECLs are handled in `assign_parms'. */
3826 if (TREE_CODE (decl) != VAR_DECL)
3827 return;
3828 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3829 return;
3831 thisblock = block_stack;
3833 /* Create the RTL representation for the variable. */
3835 if (type == error_mark_node)
3836 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3838 else if (DECL_SIZE (decl) == 0)
3839 /* Variable with incomplete type. */
3840 {
3841 if (DECL_INITIAL (decl) == 0)
3842 /* Error message was already done; now avoid a crash. */
3843 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3844 else
3845 /* An initializer is going to decide the size of this array.
3846 Until we know the size, represent its address with a reg. */
3847 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3849 set_mem_attributes (DECL_RTL (decl), decl, 1);
3850 }
3851 else if (DECL_MODE (decl) != BLKmode
3852 /* If -ffloat-store, don't put explicit float vars
3853 into regs. */
3854 && !(flag_float_store
3855 && TREE_CODE (type) == REAL_TYPE)
3856 && ! TREE_THIS_VOLATILE (decl)
3857 && (DECL_REGISTER (decl) || optimize)
3858 /* if -fcheck-memory-usage, check all variables. */
3859 && ! current_function_check_memory_usage)
3860 {
3861 /* Automatic variable that can go in a register. */
3862 int unsignedp = TREE_UNSIGNED (type);
3863 enum machine_mode reg_mode
3864 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3866 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3867 mark_user_reg (DECL_RTL (decl));
3869 if (POINTER_TYPE_P (type))
3870 mark_reg_pointer (DECL_RTL (decl),
3871 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3873 maybe_set_unchanging (DECL_RTL (decl), decl);
3875 /* If something wants our address, try to use ADDRESSOF. */
3876 if (TREE_ADDRESSABLE (decl))
3877 put_var_into_stack (decl);
3878 }
3880 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3881 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3882 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3883 STACK_CHECK_MAX_VAR_SIZE)))
3884 {
3885 /* Variable of fixed size that goes on the stack. */
3886 rtx oldaddr = 0;
3887 rtx addr;
3889 /* If we previously made RTL for this decl, it must be an array
3890 whose size was determined by the initializer.
3891 The old address was a register; set that register now
3892 to the proper address. */
3893 if (DECL_RTL (decl) != 0)
3894 {
3895 if (GET_CODE (DECL_RTL (decl)) != MEM
3896 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3897 abort ();
3898 oldaddr = XEXP (DECL_RTL (decl), 0);
3899 }
3901 DECL_RTL (decl) = assign_temp (TREE_TYPE (decl), 1, 1, 1);
3903 /* Set alignment we actually gave this decl. */
3904 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3905 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3906 DECL_USER_ALIGN (decl) = 0;
3908 if (oldaddr)
3909 {
3910 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3911 if (addr != oldaddr)
3912 emit_move_insn (oldaddr, addr);
3913 }
3914 }
3915 else
3916 /* Dynamic-size object: must push space on the stack. */
3917 {
3918 rtx address, size;
3920 /* Record the stack pointer on entry to block, if we have
3921 not already done so. */
3922 do_pending_stack_adjust ();
3923 save_stack_pointer ();
3925 /* In function-at-a-time mode, variable_size doesn't expand this,
3926 so do it now. */
3927 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3928 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3929 const0_rtx, VOIDmode, 0);
3931 /* Compute the variable's size, in bytes. */
3932 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3933 free_temp_slots ();
3935 /* Allocate space on the stack for the variable. Note that
3936 DECL_ALIGN says how the variable is to be aligned and we
3937 cannot use it to conclude anything about the alignment of
3938 the size. */
3939 address = allocate_dynamic_stack_space (size, NULL_RTX,
3940 TYPE_ALIGN (TREE_TYPE (decl)));
3942 /* Reference the variable indirectly through that rtx. */
3943 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3945 set_mem_attributes (DECL_RTL (decl), decl, 1);
3947 /* Indicate the alignment we actually gave this variable. */
3948 #ifdef STACK_BOUNDARY
3949 DECL_ALIGN (decl) = STACK_BOUNDARY;
3950 #else
3951 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3952 #endif
3953 DECL_USER_ALIGN (decl) = 0;
3954 }
3955 }
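/* Illustrative only (editor's sketch): the three main paths through
   expand_decl in terms of hypothetical user code.  `a' is a register
   candidate, `b' gets a fixed-size stack slot, and `c' takes the
   dynamic-size path, which saves the stack pointer and calls
   allocate_dynamic_stack_space.  */
#if 0
int
decl_example (int n)
{
  int a = 1;    /* register candidate: DECL_MODE != BLKmode */
  char b[16];   /* fixed size: stack slot */
  char c[n];    /* variable size: space pushed at run time */
  b[0] = c[0] = 0;
  return a + b[0] + c[0];
}
#endif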
3957 /* Emit code to perform the initialization of a declaration DECL. */
3959 void
3960 expand_decl_init (decl)
3961 tree decl;
3962 {
3963 int was_used = TREE_USED (decl);
3965 /* If this is a CONST_DECL, we don't have to generate any code, but
3966 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3967 to be set while in the obstack containing the constant. If we don't
3968 do this, we can lose if we have functions nested three deep and the middle
3969 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3970 the innermost function is the first to expand that STRING_CST. */
3971 if (TREE_CODE (decl) == CONST_DECL)
3972 {
3973 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3974 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3975 EXPAND_INITIALIZER);
3976 return;
3977 }
3979 if (TREE_STATIC (decl))
3980 return;
3982 /* Compute and store the initial value now. */
3984 if (DECL_INITIAL (decl) == error_mark_node)
3985 {
3986 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3988 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3989 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3990 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3991 0, 0);
3992 emit_queue ();
3993 }
3994 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3995 {
3996 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3997 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3998 emit_queue ();
3999 }
4001 /* Don't let the initialization count as "using" the variable. */
4002 TREE_USED (decl) = was_used;
4004 /* Free any temporaries we made while initializing the decl. */
4005 preserve_temp_slots (NULL_RTX);
4006 free_temp_slots ();
4007 }
4009 /* CLEANUP is an expression to be executed at exit from this binding contour;
4010 for example, in C++, it might call the destructor for this variable.
4012 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4013 CLEANUP multiple times, and have the correct semantics. This
4014 happens in exception handling, for gotos, returns, and breaks that
4015 leave the current scope.
4017 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4018 that is not associated with any particular variable. */
4020 int
4021 expand_decl_cleanup (decl, cleanup)
4022 tree decl, cleanup;
4023 {
4024 struct nesting *thisblock;
4026 /* Error if we are not in any block. */
4027 if (cfun == 0 || block_stack == 0)
4028 return 0;
4030 thisblock = block_stack;
4032 /* Record the cleanup if there is one. */
4034 if (cleanup != 0)
4035 {
4036 tree t;
4037 rtx seq;
4038 tree *cleanups = &thisblock->data.block.cleanups;
4039 int cond_context = conditional_context ();
4041 if (cond_context)
4042 {
4043 rtx flag = gen_reg_rtx (word_mode);
4044 rtx set_flag_0;
4045 tree cond;
4047 start_sequence ();
4048 emit_move_insn (flag, const0_rtx);
4049 set_flag_0 = get_insns ();
4050 end_sequence ();
4052 thisblock->data.block.last_unconditional_cleanup
4053 = emit_insns_after (set_flag_0,
4054 thisblock->data.block.last_unconditional_cleanup);
4056 emit_move_insn (flag, const1_rtx);
4058 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
4059 DECL_RTL (cond) = flag;
4061 /* Conditionalize the cleanup. */
4062 cleanup = build (COND_EXPR, void_type_node,
4063 truthvalue_conversion (cond),
4064 cleanup, integer_zero_node);
4065 cleanup = fold (cleanup);
4067 cleanups = thisblock->data.block.cleanup_ptr;
4068 }
4070 cleanup = unsave_expr (cleanup);
4072 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4074 if (! cond_context)
4075 /* If this block has a cleanup, it belongs in stack_block_stack. */
4076 stack_block_stack = thisblock;
4078 if (cond_context)
4079 {
4080 start_sequence ();
4081 }
4083 /* If this was optimized so that there is no exception region for the
4084 cleanup, then mark the TREE_LIST node, so that we can later tell
4085 if we need to call expand_eh_region_end. */
4086 if (! using_eh_for_cleanups_p
4087 || expand_eh_region_start_tree (decl, cleanup))
4088 TREE_ADDRESSABLE (t) = 1;
4089 /* If that started a new EH region, we're in a new block. */
4090 thisblock = block_stack;
4092 if (cond_context)
4093 {
4094 seq = get_insns ();
4095 end_sequence ();
4096 if (seq)
4097 thisblock->data.block.last_unconditional_cleanup
4098 = emit_insns_after (seq,
4099 thisblock->data.block.last_unconditional_cleanup);
4100 }
4101 else
4102 {
4103 thisblock->data.block.last_unconditional_cleanup
4104 = get_last_insn ();
4105 /* When we insert instructions after the last unconditional cleanup,
4106 we don't adjust last_insn. That means that a later add_insn will
4107 clobber the instructions we've just added. The easiest way to
4108 fix this is to just insert another instruction here, so that the
4109 instructions inserted after the last unconditional cleanup are
4110 never the last instruction. */
4111 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4112 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4113 }
4114 }
4115 return 1;
4116 }
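/* Illustrative only (editor's sketch): a front end registers a cleanup
   roughly like this, e.g. for a C++ destructor call.  DECL and
   DTOR_CALL are trees the front end has already built; the return
   value is 0 only outside any binding contour.  */
#if 0
static void
register_dtor_cleanup (decl, dtor_call)
     tree decl, dtor_call;
{
  if (! expand_decl_cleanup (decl, dtor_call))
    abort ();
}
#endif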
4118 /* Like expand_decl_cleanup, but suppress generating an exception handler
4119 to perform the cleanup. */
4121 #if 0
4122 int
4123 expand_decl_cleanup_no_eh (decl, cleanup)
4124 tree decl, cleanup;
4125 {
4126 int save_eh = using_eh_for_cleanups_p;
4127 int result;
4129 using_eh_for_cleanups_p = 0;
4130 result = expand_decl_cleanup (decl, cleanup);
4131 using_eh_for_cleanups_p = save_eh;
4133 return result;
4134 }
4135 #endif
4137 /* Arrange for the top element of the dynamic cleanup chain to be
4138 popped if we exit the current binding contour. DECL is the
4139 associated declaration, if any, otherwise NULL_TREE. If the
4140 current contour is left via an exception, then __sjthrow will pop
4141 the top element off the dynamic cleanup chain. The code that
4142 avoids doing the action we push into the cleanup chain in the
4143 exceptional case is contained in expand_cleanups.
4145 This routine is only used by expand_eh_region_start, and that is
4146 the only way in which an exception region should be started. This
4147 routine is only used when using the setjmp/longjmp codegen method
4148 for exception handling. */
4150 int
4151 expand_dcc_cleanup (decl)
4152 tree decl;
4153 {
4154 struct nesting *thisblock;
4155 tree cleanup;
4157 /* Error if we are not in any block. */
4158 if (cfun == 0 || block_stack == 0)
4159 return 0;
4160 thisblock = block_stack;
4162 /* Record the cleanup for the dynamic handler chain. */
4164 cleanup = make_node (POPDCC_EXPR);
4166 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4167 thisblock->data.block.cleanups
4168 = tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4170 /* If this block has a cleanup, it belongs in stack_block_stack. */
4171 stack_block_stack = thisblock;
4172 return 1;
4173 }
4175 /* Arrange for the top element of the dynamic handler chain to be
4176 popped if we exit the current binding contour. DECL is the
4177 associated declaration, if any, otherwise NULL_TREE. If the current
4178 contour is left via an exception, then __sjthrow will pop the top
4179 element off the dynamic handler chain. The code that avoids doing
4180 the action we push into the handler chain in the exceptional case
4181 is contained in expand_cleanups.
4183 This routine is only used by expand_eh_region_start, and that is
4184 the only way in which an exception region should be started. This
4185 routine is only used when using the setjmp/longjmp codegen method
4186 for exception handling. */
4188 int
4189 expand_dhc_cleanup (decl)
4190 tree decl;
4191 {
4192 struct nesting *thisblock;
4193 tree cleanup;
4195 /* Error if we are not in any block. */
4196 if (cfun == 0 || block_stack == 0)
4197 return 0;
4198 thisblock = block_stack;
4200 /* Record the cleanup for the dynamic handler chain. */
4202 cleanup = make_node (POPDHC_EXPR);
4204 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4205 thisblock->data.block.cleanups
4206 = tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4208 /* If this block has a cleanup, it belongs in stack_block_stack. */
4209 stack_block_stack = thisblock;
4210 return 1;
4211 }
4213 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4214 DECL_ELTS is the list of elements that belong to DECL's type.
4215 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4217 void
4218 expand_anon_union_decl (decl, cleanup, decl_elts)
4219 tree decl, cleanup, decl_elts;
4220 {
4221 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4222 rtx x;
4223 tree t;
4225 /* If any of the elements are addressable, so is the entire union. */
4226 for (t = decl_elts; t; t = TREE_CHAIN (t))
4227 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4228 {
4229 TREE_ADDRESSABLE (decl) = 1;
4230 break;
4231 }
4233 expand_decl (decl);
4234 expand_decl_cleanup (decl, cleanup);
4235 x = DECL_RTL (decl);
4237 /* Go through the elements, assigning RTL to each. */
4238 for (t = decl_elts; t; t = TREE_CHAIN (t))
4239 {
4240 tree decl_elt = TREE_VALUE (t);
4241 tree cleanup_elt = TREE_PURPOSE (t);
4242 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4244 /* Propagate the union's alignment to the elements. */
4245 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4246 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4248 /* If the element has BLKmode and the union doesn't, the union is
4249 aligned such that the element doesn't need to have BLKmode, so
4250 change the element's mode to the appropriate one for its size. */
4251 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4252 DECL_MODE (decl_elt) = mode
4253 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4255 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4256 instead create a new MEM rtx with the proper mode. */
4257 if (GET_CODE (x) == MEM)
4258 {
4259 if (mode == GET_MODE (x))
4260 DECL_RTL (decl_elt) = x;
4261 else
4262 {
4263 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
4264 MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
4265 }
4266 }
4267 else if (GET_CODE (x) == REG)
4268 {
4269 if (mode == GET_MODE (x))
4270 DECL_RTL (decl_elt) = x;
4271 else
4272 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
4273 }
4274 else
4275 abort ();
4277 /* Record the cleanup if there is one. */
4279 if (cleanup != 0)
4280 thisblock->data.block.cleanups
4281 = tree_cons (decl_elt, cleanup_elt,
4282 thisblock->data.block.cleanups);
4283 }
4284 }
4286 /* Expand a list of cleanups LIST.
4287 Elements may be expressions or may be nested lists.
4289 If DONT_DO is nonnull, then any list-element
4290 whose TREE_PURPOSE matches DONT_DO is omitted.
4291 This is sometimes used to avoid a cleanup associated with
4292 a value that is being returned out of the scope.
4294 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4295 goto and handle protection regions specially in that case.
4297 If REACHABLE is nonzero, we emit code; otherwise we just inform the
4298 exception handling code about this finalization. */
4300 static void
4301 expand_cleanups (list, dont_do, in_fixup, reachable)
4302 tree list;
4303 tree dont_do;
4304 int in_fixup;
4305 int reachable;
4306 {
4307 tree tail;
4308 for (tail = list; tail; tail = TREE_CHAIN (tail))
4309 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4310 {
4311 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4312 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4313 else
4314 {
4315 if (! in_fixup)
4316 {
4317 tree cleanup = TREE_VALUE (tail);
4319 /* See expand_d{h,c}c_cleanup for why we avoid this. */
4320 if (TREE_CODE (cleanup) != POPDHC_EXPR
4321 && TREE_CODE (cleanup) != POPDCC_EXPR
4322 /* See expand_eh_region_start_tree for this case. */
4323 && ! TREE_ADDRESSABLE (tail))
4324 {
4325 cleanup = protect_with_terminate (cleanup);
4326 expand_eh_region_end (cleanup);
4327 }
4328 }
4330 if (reachable)
4331 {
4332 /* Cleanups may be run multiple times. For example,
4333 when exiting a binding contour, we expand the
4334 cleanups associated with that contour. When a goto
4335 within that binding contour has a target outside that
4336 contour, it will expand all cleanups from its scope to
4337 the target. Though the cleanups are expanded multiple
4338 times, the control paths are non-overlapping so the
4339 cleanups will not be executed twice. */
4341 /* We may need to protect fixups with rethrow regions. */
4342 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
4344 if (protect)
4345 expand_fixup_region_start ();
4347 /* The cleanup might contain try-blocks, so we have to
4348 preserve our current queue. */
4349 push_ehqueue ();
4350 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4351 pop_ehqueue ();
4352 if (protect)
4353 expand_fixup_region_end (TREE_VALUE (tail));
4354 free_temp_slots ();
4355 }
4356 }
4357 }
4358 }
4360 /* Mark the context in which we are emitting RTL as a conditional
4361 context, so that any cleanup actions we register with
4362 expand_decl_init will be properly conditionalized when those
4363 cleanup actions are later performed. Must be called before any
4364 expression (tree) is expanded that is within a conditional context. */
4366 void
4367 start_cleanup_deferral ()
4368 {
4369 /* block_stack can be NULL if we are inside the parameter list. It is
4370 OK to do nothing, because cleanups aren't possible here. */
4371 if (block_stack)
4372 ++block_stack->data.block.conditional_code;
4373 }
4375 /* Mark the end of a conditional region of code. Because cleanup
4376 deferrals may be nested, we may still be in a conditional region
4377 after we end the currently deferred cleanups; only after we end all
4378 deferred cleanups, are we back in unconditional code. */
4380 void
4381 end_cleanup_deferral ()
4382 {
4383 /* block_stack can be NULL if we are inside the parameter list. It is
4384 OK to do nothing, because cleanups aren't possible here. */
4385 if (block_stack)
4386 --block_stack->data.block.conditional_code;
4387 }
4389 /* Move all cleanups from the current block_stack
4390 to the containing block_stack, where they are assumed to
4391 have been created. If anything can cause a temporary to
4392 be created, but not expanded for more than one level of
4393 block_stacks, then this code will have to change. */
4395 void
4396 move_cleanups_up ()
4397 {
4398 struct nesting *block = block_stack;
4399 struct nesting *outer = block->next;
4401 outer->data.block.cleanups
4402 = chainon (block->data.block.cleanups,
4403 outer->data.block.cleanups);
4404 block->data.block.cleanups = 0;
4405 }
4407 tree
4408 last_cleanup_this_contour ()
4409 {
4410 if (block_stack == 0)
4411 return 0;
4413 return block_stack->data.block.cleanups;
4414 }
4416 /* Return 1 if there are any pending cleanups at this point.
4417 If THIS_CONTOUR is nonzero, check the current contour as well.
4418 Otherwise, look only at the contours that enclose this one. */
4420 int
4421 any_pending_cleanups (this_contour)
4422 int this_contour;
4423 {
4424 struct nesting *block;
4426 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4427 return 0;
4429 if (this_contour && block_stack->data.block.cleanups != NULL)
4430 return 1;
4431 if (block_stack->data.block.cleanups == 0
4432 && block_stack->data.block.outer_cleanups == 0)
4433 return 0;
4435 for (block = block_stack->next; block; block = block->next)
4436 if (block->data.block.cleanups != 0)
4437 return 1;
4439 return 0;
4440 }
4442 /* Enter a case (Pascal) or switch (C) statement.
4443 Push a block onto case_stack and nesting_stack
4444 to accumulate the case-labels that are seen
4445 and to record the labels generated for the statement.
4447 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4448 Otherwise, this construct is transparent for `exit_something'.
4450 EXPR is the index-expression to be dispatched on.
4451 TYPE is its nominal type. We could simply convert EXPR to this type,
4452 but instead we take short cuts. */
4454 void
4455 expand_start_case (exit_flag, expr, type, printname)
4456 int exit_flag;
4457 tree expr;
4458 tree type;
4459 const char *printname;
4460 {
4461 register struct nesting *thiscase = ALLOC_NESTING ();
4463 /* Make an entry on case_stack for the case we are entering. */
4465 thiscase->next = case_stack;
4466 thiscase->all = nesting_stack;
4467 thiscase->depth = ++nesting_depth;
4468 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4469 thiscase->data.case_stmt.case_list = 0;
4470 thiscase->data.case_stmt.index_expr = expr;
4471 thiscase->data.case_stmt.nominal_type = type;
4472 thiscase->data.case_stmt.default_label = 0;
4473 thiscase->data.case_stmt.printname = printname;
4474 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4475 case_stack = thiscase;
4476 nesting_stack = thiscase;
4478 do_pending_stack_adjust ();
4480 /* Make sure case_stmt.start points to something that won't
4481 need any transformation before expand_end_case. */
4482 if (GET_CODE (get_last_insn ()) != NOTE)
4483 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4485 thiscase->data.case_stmt.start = get_last_insn ();
4487 start_cleanup_deferral ();
4488 }
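/* Illustrative only (editor's sketch): the calling protocol for the
   case machinery, as a hypothetical front-end fragment.  EXPR, TYPE,
   VAL and LABEL are trees the caller has built; the return code
   checked is the one documented at pushcase below.  */
#if 0
static void
expand_switch_sketch (expr, type, val, label)
     tree expr, type, val, label;
{
  tree duplicate;

  expand_start_case (1, expr, type, "switch statement");
  if (pushcase (val, convert, label, &duplicate) == 2)
    error ("duplicate case value");
  /* ... expand the body of the switch here ... */
  expand_end_case (expr);
}
#endif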
4490 /* Start a "dummy case statement" within which case labels are invalid
4491 and are not connected to any larger real case statement.
4492 This can be used if you don't want to let a case statement jump
4493 into the middle of certain kinds of constructs. */
4495 void
4496 expand_start_case_dummy ()
4497 {
4498 register struct nesting *thiscase = ALLOC_NESTING ();
4500 /* Make an entry on case_stack for the dummy. */
4502 thiscase->next = case_stack;
4503 thiscase->all = nesting_stack;
4504 thiscase->depth = ++nesting_depth;
4505 thiscase->exit_label = 0;
4506 thiscase->data.case_stmt.case_list = 0;
4507 thiscase->data.case_stmt.start = 0;
4508 thiscase->data.case_stmt.nominal_type = 0;
4509 thiscase->data.case_stmt.default_label = 0;
4510 case_stack = thiscase;
4511 nesting_stack = thiscase;
4512 start_cleanup_deferral ();
4513 }
4515 /* End a dummy case statement. */
4517 void
4518 expand_end_case_dummy ()
4519 {
4520 end_cleanup_deferral ();
4521 POPSTACK (case_stack);
4522 }
4524 /* Return the data type of the index-expression
4525 of the innermost case statement, or null if none. */
4527 tree
4528 case_index_expr_type ()
4529 {
4530 if (case_stack)
4531 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4532 return 0;
4533 }
4535 static void
4536 check_seenlabel ()
4537 {
4538 /* If this is the first label, warn if any insns have been emitted. */
4539 if (case_stack->data.case_stmt.line_number_status >= 0)
4540 {
4541 rtx insn;
4543 restore_line_number_status
4544 (case_stack->data.case_stmt.line_number_status);
4545 case_stack->data.case_stmt.line_number_status = -1;
4547 for (insn = case_stack->data.case_stmt.start;
4548 insn;
4549 insn = NEXT_INSN (insn))
4550 {
4551 if (GET_CODE (insn) == CODE_LABEL)
4552 break;
4553 if (GET_CODE (insn) != NOTE
4554 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4555 {
4556 do
4557 insn = PREV_INSN (insn);
4558 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4560 /* If insn is zero, then there must have been a syntax error. */
4561 if (insn)
4562 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4563 NOTE_LINE_NUMBER (insn),
4564 "unreachable code at beginning of %s",
4565 case_stack->data.case_stmt.printname);
4566 break;
4567 }
4568 }
4569 }
4570 }
4572 /* Accumulate one case or default label inside a case or switch statement.
4573 VALUE is the value of the case (a null pointer, for a default label).
4574 The function CONVERTER, when applied to arguments T and V,
4575 converts the value V to the type T.
4577 If not currently inside a case or switch statement, return 1 and do
4578 nothing. The caller will print a language-specific error message.
4579 If VALUE is a duplicate or overlaps, return 2 and do nothing
4580 except store the (first) duplicate node in *DUPLICATE.
4581 If VALUE is out of range, return 3 and do nothing.
4582 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4583 Return 0 on success.
4585 Extended to handle range statements. */
4587 int
4588 pushcase (value, converter, label, duplicate)
4589 register tree value;
4590 tree (*converter) PARAMS ((tree, tree));
4591 register tree label;
4592 tree *duplicate;
4593 {
4594 tree index_type;
4595 tree nominal_type;
4597 /* Fail if not inside a real case statement. */
4598 if (! (case_stack && case_stack->data.case_stmt.start))
4599 return 1;
4601 if (stack_block_stack
4602 && stack_block_stack->depth > case_stack->depth)
4603 return 5;
4605 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4606 nominal_type = case_stack->data.case_stmt.nominal_type;
4608 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4609 if (index_type == error_mark_node)
4610 return 0;
4612 /* Convert VALUE to the type in which the comparisons are nominally done. */
4613 if (value != 0)
4614 value = (*converter) (nominal_type, value);
4616 check_seenlabel ();
4618 /* Fail if this value is out of range for the actual type of the index
4619 (which may be narrower than NOMINAL_TYPE). */
4620 if (value != 0
4621 && (TREE_CONSTANT_OVERFLOW (value)
4622 || ! int_fits_type_p (value, index_type)))
4623 return 3;
4625 return add_case_node (value, value, label, duplicate);
4626 }
4628 /* Like pushcase but this case applies to all values between VALUE1 and
4629 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4630 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4631 starts at VALUE1 and ends at the highest value of the index type.
4632 If both are NULL, this case applies to all values.
4634 The return value is the same as that of pushcase but there is one
4635 additional error code: 4 means the specified range was empty. */
4637 int
4638 pushcase_range (value1, value2, converter, label, duplicate)
4639 register tree value1, value2;
4640 tree (*converter) PARAMS ((tree, tree));
4641 register tree label;
4642 tree *duplicate;
4643 {
4644 tree index_type;
4645 tree nominal_type;
4647 /* Fail if not inside a real case statement. */
4648 if (! (case_stack && case_stack->data.case_stmt.start))
4649 return 1;
4651 if (stack_block_stack
4652 && stack_block_stack->depth > case_stack->depth)
4653 return 5;
4655 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4656 nominal_type = case_stack->data.case_stmt.nominal_type;
4658 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4659 if (index_type == error_mark_node)
4660 return 0;
4662 check_seenlabel ();
4664 /* Convert VALUEs to type in which the comparisons are nominally done
4665 and replace any unspecified value with the corresponding bound. */
4666 if (value1 == 0)
4667 value1 = TYPE_MIN_VALUE (index_type);
4668 if (value2 == 0)
4669 value2 = TYPE_MAX_VALUE (index_type);
4671 /* Fail if the range is empty. Do this before any conversion since
4672 we want to allow out-of-range empty ranges. */
4673 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4674 return 4;
4676 /* If the max was unbounded, use the max of the nominal_type we are
4677 converting to. Do this after the < check above to suppress false
4678 positives. */
4679 if (value2 == 0)
4680 value2 = TYPE_MAX_VALUE (nominal_type);
4682 value1 = (*converter) (nominal_type, value1);
4683 value2 = (*converter) (nominal_type, value2);
4685 /* Fail if these values are out of range. */
4686 if (TREE_CONSTANT_OVERFLOW (value1)
4687 || ! int_fits_type_p (value1, index_type))
4688 return 3;
4690 if (TREE_CONSTANT_OVERFLOW (value2)
4691 || ! int_fits_type_p (value2, index_type))
4692 return 3;
4694 return add_case_node (value1, value2, label, duplicate);
4695 }
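/* Illustrative only (editor's note): pushcase_range backs the GNU C
   case-range extension.  A hypothetical user-level switch such as this
   one reaches it with VALUE1 = 1 and VALUE2 = 5, while the default
   label goes through pushcase with a null VALUE.  */
#if 0
int
classify (int c)
{
  switch (c)
    {
    case 1 ... 5:       /* front end calls pushcase_range */
      return 1;
    default:            /* front end calls pushcase with VALUE == 0 */
      return 0;
    }
}
#endif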
4697 /* Do the actual insertion of a case label for pushcase and pushcase_range
4698 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4699 slowdown for large switch statements. */
4701 static int
4702 add_case_node (low, high, label, duplicate)
4703 tree low, high;
4704 tree label;
4705 tree *duplicate;
4706 {
4707 struct case_node *p, **q, *r;
4709 /* If there's no HIGH value, then this is not a case range; it's
4710 just a simple case label. But that's just a degenerate case
4711 range. */
4712 if (!high)
4713 high = low;
4715 /* Handle default labels specially. */
4716 if (!high && !low)
4717 {
4718 if (case_stack->data.case_stmt.default_label != 0)
4719 {
4720 *duplicate = case_stack->data.case_stmt.default_label;
4721 return 2;
4722 }
4723 case_stack->data.case_stmt.default_label = label;
4724 expand_label (label);
4725 return 0;
4726 }
4728 q = &case_stack->data.case_stmt.case_list;
4729 p = *q;
4731 while ((r = *q))
4732 {
4733 p = r;
4735 /* Keep going past elements distinctly greater than HIGH. */
4736 if (tree_int_cst_lt (high, p->low))
4737 q = &p->left;
4739 /* or distinctly less than LOW. */
4740 else if (tree_int_cst_lt (p->high, low))
4741 q = &p->right;
4743 else
4744 {
4745 /* We have an overlap; this is an error. */
4746 *duplicate = p->code_label;
4747 return 2;
4748 }
4749 }
4751 /* Add this label to the chain, and succeed. */
4753 r = (struct case_node *) xmalloc (sizeof (struct case_node));
4754 r->low = low;
4756 /* If the bounds are equal, turn this into the one-value case. */
4757 if (tree_int_cst_equal (low, high))
4758 r->high = r->low;
4759 else
4760 r->high = high;
4762 r->code_label = label;
4763 expand_label (label);
4765 *q = r;
4766 r->parent = p;
4767 r->left = 0;
4768 r->right = 0;
4769 r->balance = 0;
4771 while (p)
4772 {
4773 struct case_node *s;
4775 if (r == p->left)
4777 int b;
4779 if (! (b = p->balance))
4780 /* Growth propagation from left side. */
4781 p->balance = -1;
4782 else if (b < 0)
4784 if (r->balance < 0)
4786 /* R-Rotation */
4787 if ((p->left = s = r->right))
4788 s->parent = p;
4790 r->right = p;
4791 p->balance = 0;
4792 r->balance = 0;
4793 s = p->parent;
4794 p->parent = r;
4796 if ((r->parent = s))
4798 if (s->left == p)
4799 s->left = r;
4800 else
4801 s->right = r;
4803 else
4804 case_stack->data.case_stmt.case_list = r;
4806 else
4807 /* r->balance == +1 */
4809 /* LR-Rotation */
4811 int b2;
4812 struct case_node *t = r->right;
4814 if ((p->left = s = t->right))
4815 s->parent = p;
4817 t->right = p;
4818 if ((r->right = s = t->left))
4819 s->parent = r;
4821 t->left = r;
4822 b = t->balance;
4823 b2 = b < 0;
4824 p->balance = b2;
4825 b2 = -b2 - b;
4826 r->balance = b2;
4827 t->balance = 0;
4828 s = p->parent;
4829 p->parent = t;
4830 r->parent = t;
4832 if ((t->parent = s))
4834 if (s->left == p)
4835 s->left = t;
4836 else
4837 s->right = t;
4839 else
4840 case_stack->data.case_stmt.case_list = t;
4842 break;
4845 else
4847 /* p->balance == +1; growth of left side balances the node. */
4848 p->balance = 0;
4849 break;
4852 else
4853 /* r == p->right */
4855 int b;
4857 if (! (b = p->balance))
4858 /* Growth propagation from right side. */
4859 p->balance++;
4860 else if (b > 0)
4862 if (r->balance > 0)
4864 /* L-Rotation */
4866 if ((p->right = s = r->left))
4867 s->parent = p;
4869 r->left = p;
4870 p->balance = 0;
4871 r->balance = 0;
4872 s = p->parent;
4873 p->parent = r;
4874 if ((r->parent = s))
4876 if (s->left == p)
4877 s->left = r;
4878 else
4879 s->right = r;
4882 else
4883 case_stack->data.case_stmt.case_list = r;
4886 else
4887 /* r->balance == -1 */
4889 /* RL-Rotation */
4890 int b2;
4891 struct case_node *t = r->left;
4893 if ((p->right = s = t->left))
4894 s->parent = p;
4896 t->left = p;
4898 if ((r->left = s = t->right))
4899 s->parent = r;
4901 t->right = r;
4902 b = t->balance;
4903 b2 = b < 0;
4904 r->balance = b2;
4905 b2 = -b2 - b;
4906 p->balance = b2;
4907 t->balance = 0;
4908 s = p->parent;
4909 p->parent = t;
4910 r->parent = t;
4912 if ((t->parent = s))
4914 if (s->left == p)
4915 s->left = t;
4916 else
4917 s->right = t;
4920 else
4921 case_stack->data.case_stmt.case_list = t;
4923 break;
4925 else
4927 /* p->balance == -1; growth of right side balances the node. */
4928 p->balance = 0;
4929 break;
4933 r = p;
4934 p = p->parent;
4935 }
4937 return 0;
4938 }
4940 /* Returns the number of possible values of TYPE.
4941 Returns -1 if the number is unknown, variable, or if the number does not
4942 fit in a HOST_WIDE_INT.
4943 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4944 do not increase monotonically (there may be duplicates);
4945 to 1 if the values increase monotonically, but not always by 1;
4946 otherwise sets it to 0. */
4948 HOST_WIDE_INT
4949 all_cases_count (type, spareness)
4950 tree type;
4951 int *spareness;
4952 {
4953 tree t;
4954 HOST_WIDE_INT count, minval, lastval;
4956 *spareness = 0;
4958 switch (TREE_CODE (type))
4959 {
4960 case BOOLEAN_TYPE:
4961 count = 2;
4962 break;
4964 case CHAR_TYPE:
4965 count = 1 << BITS_PER_UNIT;
4966 break;
4968 default:
4969 case INTEGER_TYPE:
4970 if (TYPE_MAX_VALUE (type) != 0
4971 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4972 TYPE_MIN_VALUE (type))))
4973 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4974 convert (type, integer_one_node))))
4975 && host_integerp (t, 1))
4976 count = tree_low_cst (t, 1);
4977 else
4978 return -1;
4979 break;
4981 case ENUMERAL_TYPE:
4982 /* Don't waste time with enumeral types with huge values. */
4983 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4984 || TYPE_MAX_VALUE (type) == 0
4985 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4986 return -1;
4988 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4989 count = 0;
4991 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4992 {
4993 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4995 if (*spareness == 2 || thisval < lastval)
4996 *spareness = 2;
4997 else if (thisval != minval + count)
4998 *spareness = 1;
5000 count++;
5001 }
5002 }
5004 return count;
5005 }
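/* Worked example (editor's note): for a hypothetical
   enum e { A = 0, B = 1, D = 3 }, the ENUMERAL_TYPE arm returns 3,
   one per enumerator, and sets *SPARENESS to 1 because the values
   increase monotonically but skip 2.  For an INTEGER_TYPE index the
   count is instead TYPE_MAX_VALUE - TYPE_MIN_VALUE + 1, provided
   that fits in a HOST_WIDE_INT.  */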
5007 #define BITARRAY_TEST(ARRAY, INDEX) \
5008 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5009 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
5010 #define BITARRAY_SET(ARRAY, INDEX) \
5011 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5012 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
5014 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
5015 with the case values we have seen, assuming the case expression
5016 has the given TYPE.
5017 SPARSENESS is as determined by all_cases_count.
5019 The time needed is proportional to COUNT, unless
5020 SPARSENESS is 2, in which case quadratic time is needed. */
5022 void
5023 mark_seen_cases (type, cases_seen, count, sparseness)
5024 tree type;
5025 unsigned char *cases_seen;
5026 HOST_WIDE_INT count;
5027 int sparseness;
5028 {
5029 tree next_node_to_try = NULL_TREE;
5030 HOST_WIDE_INT next_node_offset = 0;
5032 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
5033 tree val = make_node (INTEGER_CST);
5035 TREE_TYPE (val) = type;
5036 if (! root)
5037 /* Do nothing. */
5038 ;
5039 else if (sparseness == 2)
5041 tree t;
5042 unsigned HOST_WIDE_INT xlo;
5044 /* This less efficient loop is only needed to handle
5045 duplicate case values (multiple enum constants
5046 with the same value). */
5047 TREE_TYPE (val) = TREE_TYPE (root->low);
5048 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5049 t = TREE_CHAIN (t), xlo++)
5051 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
5052 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
5053 n = root;
5056 /* Keep going past elements distinctly greater than VAL. */
5057 if (tree_int_cst_lt (val, n->low))
5058 n = n->left;
5060 /* or distinctly less than VAL. */
5061 else if (tree_int_cst_lt (n->high, val))
5062 n = n->right;
5064 else
5066 /* We have found a matching range. */
5067 BITARRAY_SET (cases_seen, xlo);
5068 break;
5071 while (n);
5074 else
5076 if (root->left)
5077 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5079 for (n = root; n; n = n->right)
5081 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5082 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5083 while (! tree_int_cst_lt (n->high, val))
5085 /* Calculate (into xlo) the "offset" of the integer (val).
5086 The element with lowest value has offset 0, the next smallest
5087 element has offset 1, etc. */
5089 unsigned HOST_WIDE_INT xlo;
5090 HOST_WIDE_INT xhi;
5091 tree t;
5093 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5095 /* The TYPE_VALUES will be in increasing order, so
5096 start searching where we last ended. */
5097 t = next_node_to_try;
5098 xlo = next_node_offset;
5099 xhi = 0;
5100 for (;;)
5102 if (t == NULL_TREE)
5104 t = TYPE_VALUES (type);
5105 xlo = 0;
5107 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5109 next_node_to_try = TREE_CHAIN (t);
5110 next_node_offset = xlo + 1;
5111 break;
5113 xlo++;
5114 t = TREE_CHAIN (t);
5115 if (t == next_node_to_try)
5117 xlo = -1;
5118 break;
5122 else
5124 t = TYPE_MIN_VALUE (type);
5125 if (t)
5126 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5127 &xlo, &xhi);
5128 else
5129 xlo = xhi = 0;
5130 add_double (xlo, xhi,
5131 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5132 &xlo, &xhi);
5135 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5136 BITARRAY_SET (cases_seen, xlo);
5138 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5139 1, 0,
5140 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5146 /* Called when the index of a switch statement is an enumerated type
5147 and there is no default label.
5149 Checks that all enumeration literals are covered by the case
5150 expressions of a switch. Also, warn if there are any extra
5151 switch cases that are *not* elements of the enumerated type.
5153 If all enumeration literals were covered by the case expressions,
5154 turn one of the expressions into the default expression since it should
5155 not be possible to fall through such a switch. */
5157 void
5158 check_for_full_enumeration_handling (type)
5159 tree type;
5160 {
5161 register struct case_node *n;
5162 register tree chain;
5163 #if 0 /* variable used by 'if 0'ed code below. */
5164 register struct case_node **l;
5165 int all_values = 1;
5166 #endif
5168 /* True iff the selector type is a numbered set mode. */
5169 int sparseness = 0;
5171 /* The number of possible selector values. */
5172 HOST_WIDE_INT size;
5174 /* For each possible selector value, a one iff it has been matched
5175 by a case value alternative. */
5176 unsigned char *cases_seen;
5178 /* The allocated size of cases_seen, in chars. */
5179 HOST_WIDE_INT bytes_needed;
5181 if (! warn_switch)
5182 return;
5184 size = all_cases_count (type, &sparseness);
5185 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5187 if (size > 0 && size < 600000
5188 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5189 this optimization if we don't have enough memory rather than
5190 aborting, as xmalloc would do. */
5191 && (cases_seen = (unsigned char *) calloc (bytes_needed, 1)) != NULL)
5192 {
5193 HOST_WIDE_INT i;
5194 tree v = TYPE_VALUES (type);
5196 /* The time complexity of this code is normally O(N), where
5197 N is the number of members in the enumerated type.
5198 However, if type is an ENUMERAL_TYPE whose values do not
5199 increase monotonically, O(N*log(N)) time may be needed. */
5201 mark_seen_cases (type, cases_seen, size, sparseness);
5203 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5204 if (BITARRAY_TEST (cases_seen, i) == 0)
5205 warning ("enumeration value `%s' not handled in switch",
5206 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5208 free (cases_seen);
5209 }
5211 /* Now we go the other way around; we warn if there are case
5212 expressions that don't correspond to enumerators. This can
5213 occur since C and C++ don't enforce type-checking of
5214 assignments to enumeration variables. */
5216 if (case_stack->data.case_stmt.case_list
5217 && case_stack->data.case_stmt.case_list->left)
5218 case_stack->data.case_stmt.case_list
5219 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5220 if (warn_switch)
5221 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5222 {
5223 for (chain = TYPE_VALUES (type);
5224 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5225 chain = TREE_CHAIN (chain))
5226 ;
5228 if (!chain)
5229 {
5230 if (TYPE_NAME (type) == 0)
5231 warning ("case value `%ld' not in enumerated type",
5232 (long) TREE_INT_CST_LOW (n->low));
5233 else
5234 warning ("case value `%ld' not in enumerated type `%s'",
5235 (long) TREE_INT_CST_LOW (n->low),
5236 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5237 == IDENTIFIER_NODE)
5238 ? TYPE_NAME (type)
5239 : DECL_NAME (TYPE_NAME (type))));
5240 }
5241 if (!tree_int_cst_equal (n->low, n->high))
5242 {
5243 for (chain = TYPE_VALUES (type);
5244 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5245 chain = TREE_CHAIN (chain))
5246 ;
5248 if (!chain)
5249 {
5250 if (TYPE_NAME (type) == 0)
5251 warning ("case value `%ld' not in enumerated type",
5252 (long) TREE_INT_CST_LOW (n->high));
5253 else
5254 warning ("case value `%ld' not in enumerated type `%s'",
5255 (long) TREE_INT_CST_LOW (n->high),
5256 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5257 == IDENTIFIER_NODE)
5258 ? TYPE_NAME (type)
5259 : DECL_NAME (TYPE_NAME (type))));
5260 }
5261 }
5262 }
5264 #if 0
5265 /* ??? This optimization is disabled because it causes valid programs to
5266 fail. ANSI C does not guarantee that an expression with enum type
5267 will have a value that is the same as one of the enumeration literals. */
5269 /* If all values were found as case labels, make one of them the default
5270 label. Thus, this switch will never fall through. We arbitrarily pick
5271 the last one to make the default since this is likely the most
5272 efficient choice. */
5274 if (all_values)
5276 for (l = &case_stack->data.case_stmt.case_list;
5277 (*l)->right != 0;
5278 l = &(*l)->right)
5281 case_stack->data.case_stmt.default_label = (*l)->code_label;
5282 *l = 0;
5284 #endif /* 0 */
5285 }
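/* Illustrative only (editor's sketch): with -Wswitch, a hypothetical
   switch like this one draws both warnings implemented above: GREEN is
   an enumeration value not handled in the switch, and 42 is a case
   value not in the enumerated type.  There must be no default label,
   or the coverage check is skipped.  */
#if 0
enum color { RED, GREEN, BLUE };

int
color_example (c)
     enum color c;
{
  switch (c)
    {
    case RED:
      return 0;
    case BLUE:
      return 1;
    case 42:    /* warning: case value not in enumerated type */
      return 2;
    }
  return -1;
}
#endif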
5287 /* Free CN, and its children. */
5289 static void
5290 free_case_nodes (cn)
5291 case_node_ptr cn;
5292 {
5293 if (cn)
5294 {
5295 free_case_nodes (cn->left);
5296 free_case_nodes (cn->right);
5297 free (cn);
5298 }
5299 }
5302 /* Terminate a case (Pascal) or switch (C) statement
5303 in which ORIG_INDEX is the expression to be tested.
5304 Generate the code to test it and jump to the right place. */
5306 void
5307 expand_end_case (orig_index)
5308 tree orig_index;
5309 {
5310 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE, orig_minval;
5311 rtx default_label = 0;
5312 register struct case_node *n;
5313 unsigned int count;
5314 rtx index;
5315 rtx table_label;
5316 int ncases;
5317 rtx *labelvec;
5318 register int i;
5319 rtx before_case;
5320 register struct nesting *thiscase = case_stack;
5321 tree index_expr, index_type;
5322 int unsignedp;
5324 /* Don't crash due to previous errors. */
5325 if (thiscase == NULL)
5326 return;
5328 table_label = gen_label_rtx ();
5329 index_expr = thiscase->data.case_stmt.index_expr;
5330 index_type = TREE_TYPE (index_expr);
5331 unsignedp = TREE_UNSIGNED (index_type);
5333 do_pending_stack_adjust ();
5335 /* This might get a spurious warning in the presence of a syntax error;
5336 it could be fixed by moving the call to check_seenlabel after the
5337 check for error_mark_node, and copying the code of check_seenlabel that
5338 deals with case_stack->data.case_stmt.line_number_status /
5339 restore_line_number_status in front of the call to end_cleanup_deferral;
5340 however, this might miss some useful warnings in the presence of
5341 non-syntax errors. */
5342 check_seenlabel ();
5344 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5345 if (index_type != error_mark_node)
5346 {
5347 /* If switch expression was an enumerated type, check that all
5348 enumeration literals are covered by the cases.
5349 No sense trying this if there's a default case, however. */
5351 if (!thiscase->data.case_stmt.default_label
5352 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5353 && TREE_CODE (index_expr) != INTEGER_CST)
5354 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5356 /* If we don't have a default-label, create one here,
5357 after the body of the switch. */
5358 if (thiscase->data.case_stmt.default_label == 0)
5359 {
5360 thiscase->data.case_stmt.default_label
5361 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5362 expand_label (thiscase->data.case_stmt.default_label);
5363 }
5364 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5366 before_case = get_last_insn ();
5368 if (thiscase->data.case_stmt.case_list
5369 && thiscase->data.case_stmt.case_list->left)
5370 thiscase->data.case_stmt.case_list
5371 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5373 /* Simplify the case-list before we count it. */
5374 group_case_nodes (thiscase->data.case_stmt.case_list);
5376 /* Get upper and lower bounds of case values.
5377 Also convert all the case values to the index expr's data type. */
5379 count = 0;
5380 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5381 {
5382 /* Check low and high label values are integers. */
5383 if (TREE_CODE (n->low) != INTEGER_CST)
5384 abort ();
5385 if (TREE_CODE (n->high) != INTEGER_CST)
5386 abort ();
5388 n->low = convert (index_type, n->low);
5389 n->high = convert (index_type, n->high);
5391 /* Count the elements and track the largest and smallest
5392 of them (treating them as signed even if they are not). */
5393 if (count++ == 0)
5394 {
5395 minval = n->low;
5396 maxval = n->high;
5397 }
5398 else
5399 {
5400 if (INT_CST_LT (n->low, minval))
5401 minval = n->low;
5402 if (INT_CST_LT (maxval, n->high))
5403 maxval = n->high;
5404 }
5405 /* A range counts double, since it requires two compares. */
5406 if (! tree_int_cst_equal (n->low, n->high))
5407 count++;
5408 }
5410 orig_minval = minval;
5412 /* Compute span of values. */
5413 if (count != 0)
5414 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5416 end_cleanup_deferral ();
5418 if (count == 0)
5419 {
5420 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5421 emit_queue ();
5422 emit_jump (default_label);
5423 }
5425 /* If range of values is much bigger than number of values,
5426 make a sequence of conditional branches instead of a dispatch.
5427 If the switch-index is a constant, do it this way
5428 because we can optimize it. */
5430 #ifndef CASE_VALUES_THRESHOLD
5431 #ifdef HAVE_casesi
5432 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5433 #else
5434 /* If the machine does not have a case insn that compares the
5435 bounds, this means extra overhead for dispatch tables
5436 which raises the threshold for using them. */
5437 #define CASE_VALUES_THRESHOLD 5
5438 #endif /* HAVE_casesi */
5439 #endif /* CASE_VALUES_THRESHOLD */
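/* Worked example (editor's note): on a machine with casesi the
   threshold above is 4, so a switch with 3 case labels always uses
   compare/jump insns; one with 8 labels spread over the range 0..200
   does too, since 200 > 10 * 8.  Only reasonably dense switches fall
   through to the casesi/tablejump code below.  */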
5441 else if (count < CASE_VALUES_THRESHOLD
5442 || compare_tree_int (range, 10 * count) > 0
5443 /* RANGE may be signed, and really large ranges will show up
5444 as negative numbers. */
5445 || compare_tree_int (range, 0) < 0
5446 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5447 || flag_pic
5448 #endif
5449 || TREE_CODE (index_expr) == INTEGER_CST
5450 /* These will reduce to a constant. */
5451 || (TREE_CODE (index_expr) == CALL_EXPR
5452 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5453 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5454 && DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_NORMAL
5455 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5456 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5457 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5458 {
5459 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5461 /* If the index is a short or char for which we do not have
5462 an insn to handle comparisons directly, convert it to
5463 a full integer now, rather than letting each comparison
5464 generate the conversion. */
5466 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5467 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
5468 == CODE_FOR_nothing))
5469 {
5470 enum machine_mode wider_mode;
5471 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5472 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5473 if (cmp_optab->handlers[(int) wider_mode].insn_code
5474 != CODE_FOR_nothing)
5475 {
5476 index = convert_to_mode (wider_mode, index, unsignedp);
5477 break;
5478 }
5479 }
5481 emit_queue ();
5482 do_pending_stack_adjust ();
5484 index = protect_from_queue (index, 0);
5485 if (GET_CODE (index) == MEM)
5486 index = copy_to_reg (index);
5487 if (GET_CODE (index) == CONST_INT
5488 || TREE_CODE (index_expr) == INTEGER_CST)
5489 {
5490 /* Make a tree node with the proper constant value
5491 if we don't already have one. */
5492 if (TREE_CODE (index_expr) != INTEGER_CST)
5493 {
5494 index_expr
5495 = build_int_2 (INTVAL (index),
5496 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5497 index_expr = convert (index_type, index_expr);
5498 }
5500 /* For constant index expressions we need only
5501 issue an unconditional branch to the appropriate
5502 target code. The job of removing any unreachable
5503 code is left to the optimization phase if the
5504 "-O" option is specified. */
5505 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5506 if (! tree_int_cst_lt (index_expr, n->low)
5507 && ! tree_int_cst_lt (n->high, index_expr))
5508 break;
5510 if (n)
5511 emit_jump (label_rtx (n->code_label));
5512 else
5513 emit_jump (default_label);
5514 }
5515 else
5516 {
5517 /* If the index expression is not constant we generate
5518 a binary decision tree to select the appropriate
5519 target code. This is done as follows:
5521 The list of cases is rearranged into a binary tree,
5522 nearly optimal assuming equal probability for each case.
5524 The tree is transformed into RTL, eliminating
5525 redundant test conditions at the same time.
5527 If program flow could reach the end of the
5528 decision tree, an unconditional jump to the
5529 default code is emitted. */
5531 use_cost_table
5532 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5533 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5534 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5535 NULL_PTR);
5536 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5537 default_label, index_type);
5538 emit_jump_if_reachable (default_label);
5539 }
5540 }
5541 else
5542 {
5543 int win = 0;
5544 #ifdef HAVE_casesi
5545 if (HAVE_casesi)
5546 {
5547 enum machine_mode index_mode = SImode;
5548 int index_bits = GET_MODE_BITSIZE (index_mode);
5549 rtx op1, op2;
5550 enum machine_mode op_mode;
5552 /* Convert the index to SImode. */
5553 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5554 > GET_MODE_BITSIZE (index_mode))
5555 {
5556 enum machine_mode omode = TYPE_MODE (index_type);
5557 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5559 /* We must handle the endpoints in the original mode. */
5560 index_expr = build (MINUS_EXPR, index_type,
5561 index_expr, minval);
5562 minval = integer_zero_node;
5563 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5564 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
5565 omode, 1, 0, default_label);
5566 /* Now we can safely truncate. */
5567 index = convert_to_mode (index_mode, index, 0);
5569 else
5571 if (TYPE_MODE (index_type) != index_mode)
5573 index_expr = convert (type_for_size (index_bits, 0),
5574 index_expr);
5575 index_type = TREE_TYPE (index_expr);
5578 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
	      emit_queue ();
	      index = protect_from_queue (index, 0);
	      do_pending_stack_adjust ();

	      op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
	      if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
		  (index, op_mode))
		index = copy_to_mode_reg (op_mode, index);

	      op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

	      op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
	      if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
		  (op1, op_mode))
		op1 = copy_to_mode_reg (op_mode, op1);

	      op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

	      op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
	      if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
		  (op2, op_mode))
		op2 = copy_to_mode_reg (op_mode, op2);

	      emit_jump_insn (gen_casesi (index, op1, op2,
					  table_label, default_label));
	      win = 1;
	    }
#endif
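	  /* Illustration only: why the range check above happens before
	     truncation.  With a wide index and a narrower `casesi', an
	     out-of-range value could alias a valid table slot if it were
	     truncated first.  Standalone sketch with invented names:  */
#if 0
static int
demo_in_range (index, minval, range)
     unsigned long index, minval, range;
{
  unsigned long biased = index - minval;	/* endpoints in wide mode */

  if (biased > range)		/* emit_cmp_and_jump_insns (..., LTU, ...) */
    return 0;			/* would jump to default_label */

  /* Only now is it safe to truncate BIASED to the narrow table mode.  */
  return 1;
}
#endif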
#ifdef HAVE_tablejump
	  if (! win && HAVE_tablejump)
	    {
	      index_type = thiscase->data.case_stmt.nominal_type;
	      index_expr = fold (build (MINUS_EXPR, index_type,
					convert (index_type, index_expr),
					convert (index_type, minval)));
	      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
	      emit_queue ();
	      index = protect_from_queue (index, 0);
	      do_pending_stack_adjust ();

	      do_tablejump (index, TYPE_MODE (index_type),
			    expand_expr (range, NULL_RTX, VOIDmode, 0),
			    table_label, default_label);
	      win = 1;
	    }
#endif
	  if (! win)
	    abort ();
	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = TREE_INT_CST_LOW (range) + 1;
	  labelvec = (rtx *) alloca (ncases * sizeof (rtx));
	  memset ((char *) labelvec, 0, ncases * sizeof (rtx));

	  for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	    {
	      register HOST_WIDE_INT i
		= TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);

	      while (1)
		{
		  labelvec[i]
		    = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
		  if (i + TREE_INT_CST_LOW (orig_minval)
		      == TREE_INT_CST_LOW (n->high))
		    break;
		  i++;
		}
	    }

	  /* Fill in the gaps with the default.  */
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
	  /* Output the table.  */
	  emit_label (table_label);

	  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
	    emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
						   gen_rtx_LABEL_REF (Pmode, table_label),
						   gen_rtvec_v (ncases, labelvec),
						   const0_rtx, const0_rtx));
	  else
	    emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
					      gen_rtvec_v (ncases, labelvec)));

	  /* If the case insn drops through the table,
	     after the table we must jump to the default-label.
	     Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
	  emit_jump (default_label);
#else
	  emit_barrier ();
#endif
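	  /* Illustration only: the dense table built above, as a
	     standalone sketch (invented `struct demo_node' and `int'
	     labels, not compiler code).  One slot per value; the gaps
	     get the default:  */
#if 0
static void
demo_build_table (list, minval, ncases, default_label, table)
     struct demo_node *list;
     int minval, ncases, default_label, *table;
{
  struct demo_node *n;
  int i;

  for (i = 0; i < ncases; i++)
    table[i] = 0;

  /* One slot for each value in each case range.  */
  for (n = list; n; n = n->right)
    for (i = n->low - minval; i <= n->high - minval; i++)
      table[i] = n->label;

  /* Fill in the gaps with the default.  */
  for (i = 0; i < ncases; i++)
    if (table[i] == 0)
      table[i] = default_label;
}
#endif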
	}

      before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
      reorder_insns (before_case, get_last_insn (),
		     thiscase->data.case_stmt.start);
    }
  else
    end_cleanup_deferral ();

  if (thiscase->exit_label)
    emit_label (thiscase->exit_label);

  free_case_nodes (case_stack->data.case_stmt.case_list);
  POPSTACK (case_stack);

  free_temp_slots ();
}
/* Convert the tree NODE into a list linked by the right field, with the left
   field zeroed.  RIGHT is used for recursion; it is a list to be placed
   rightmost in the resulting list.  */

static struct case_node *
case_tree2list (node, right)
     struct case_node *node, *right;
{
  struct case_node *left;

  if (node->right)
    right = case_tree2list (node->right, right);

  node->right = right;
  if ((left = node->left))
    {
      node->left = 0;
      return case_tree2list (left, node);
    }

  return node;
}
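/* Illustration only: flattening a three-node tree with B (value 2) at
   the root and children A (1) and C (3).  Sketch, not compiler code:  */
#if 0
  /* Assume A, B and C are `struct case_node *' already linked as a tree.  */
  B->left = A;
  B->right = C;
  list = case_tree2list (B, 0);
  /* Now: list == A, A->right == B, B->right == C, C->right == 0,
     and every `left' field is zero.  */
#endif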
/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */

static void
do_jump_if_equal (op1, op2, label, unsignedp)
     rtx op1, op2, label;
     int unsignedp;
{
  if (GET_CODE (op1) == CONST_INT
      && GET_CODE (op2) == CONST_INT)
    {
      if (INTVAL (op1) == INTVAL (op2))
	emit_jump (label);
    }
  else
    {
      enum machine_mode mode = GET_MODE (op1);
      if (mode == VOIDmode)
	mode = GET_MODE (op2);
      emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
			       0, label);
    }
}
/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

	chars above space:	16
	digits:			16
	default:		12
	space, punct:		8
	tab:			4
	newline:		2
	other "\" chars:	1
	remaining chars:	0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch as scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */
static int
estimate_case_costs (node)
     case_node_ptr node;
{
  tree min_ascii = integer_minus_one_node;
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (! cost_table_initialized)
    {
      cost_table_initialized = 1;

      for (i = 0; i < 128; i++)
	{
	  if (ISALNUM (i))
	    COST_TABLE (i) = 16;
	  else if (ISPUNCT (i))
	    COST_TABLE (i) = 8;
	  else if (ISCNTRL (i))
	    COST_TABLE (i) = -1;
	}

      COST_TABLE (' ') = 8;
      COST_TABLE ('\t') = 4;
      COST_TABLE ('\0') = 4;
      COST_TABLE ('\n') = 2;
      COST_TABLE ('\f') = 1;
      COST_TABLE ('\v') = 1;
      COST_TABLE ('\b') = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
	return 0;

      for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
	   i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
	if (COST_TABLE (i) < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}
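/* Illustration only: the table initialized above, folded into a single
   standalone lookup (invented helper, not compiler code; the overrides
   for specific characters are checked first, as the table stores):  */
#if 0
static int
demo_weight (c)
     int c;
{
  if (c == ' ')
    return 8;
  if (c == '\t' || c == '\0')
    return 4;
  if (c == '\n')
    return 2;
  if (c == '\f' || c == '\v' || c == '\b')
    return 1;
  if (c < -1 || c > 127)
    return -1;			/* not text at all */
  if (ISALNUM (c))
    return 16;
  if (ISPUNCT (c))
    return 8;
  if (ISCNTRL (c))
    return -1;			/* strange control character */
  return 0;			/* remaining chars */
}
#endif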
/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      rtx lb2;
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
	     /* Do they jump to the same place?  */
	     && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
		 || (lb != 0 && lb2 != 0
		     && simplejump_p (lb)
		     && simplejump_p (lb2)
		     && rtx_equal_p (SET_SRC (PATTERN (lb)),
				     SET_SRC (PATTERN (lb2)))))
	     /* Are their ranges consecutive?  */
	     && tree_int_cst_equal (np->low,
				    fold (build (PLUS_EXPR,
						 TREE_TYPE (node->high),
						 node->high,
						 integer_one_node)))
	     /* An overflow is not consecutive.  */
	     && tree_int_cst_lt (node->high,
				 fold (build (PLUS_EXPR,
					      TREE_TYPE (node->high),
					      node->high,
					      integer_one_node))))
	{
	  node->high = np->high;
	}
      /* NP is the first node after NODE which can't be grouped with it.
	 Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}
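/* Illustration only: at the source level, consecutive labels that reach
   the same insn collapse into one range node.  Sketch (assumes an
   invented function `f'):  */
#if 0
extern void f ();

static void
demo (c)
     int c;
{
  switch (c)
    {
    case 1:
    case 2:
    case 3:			/* one node 1..3 after grouping */
      f ();
      break;
    case 5:			/* value 4 is missing and the body is a
				   distinct insn, so this node stays
				   separate */
      f ();
      break;
    }
}
#endif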
/* Take an ordered list of case nodes
   and transform them into a near-optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
	    }

	  if (use_cost_table)
	    cost += COST_TABLE (TREE_INT_CST_LOW (np->low));

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total
		 cost; here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
		  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total
		 cost, where ranges count as 2; here I gets half the total
		 cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
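/* Illustration only: the midpoint split, as a standalone sketch over an
   invented `struct demo_node' (equal weights, no cost table; `left'
   fields are assumed zero in the incoming list):  */
#if 0
struct demo_node { struct demo_node *left, *right; };

static void
demo_balance (head)
     struct demo_node **head;
{
  struct demo_node **npp = head, *np, *left = *head;
  int i = 0, steps;

  for (np = *head; np; np = np->right)
    i++;
  if (i <= 2)
    return;			/* leave short branches as a plain list */

  /* Walk NPP to the `right' link just before the pivot.  */
  for (steps = (i + 1) / 2 - 1; steps > 0; steps--)
    npp = &(*npp)->right;

  np = *npp;			/* the pivot */
  *npp = 0;			/* detach the left half */
  np->left = left;
  *head = np;

  demo_balance (&np->left);	/* balance the two halves recursively */
  demo_balance (&np->right);
}
#endif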
/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one, the current node is bounded at its lower
   span.  Thus the test would be redundant.  */

static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}
/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one, the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If there is no upper bound, obviously no test is needed.  */

  if (TYPE_MAX_VALUE (index_type) == NULL)
    return 1;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
			       node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}
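/* Illustration only: when the bound tests become redundant, shown with
   the GNU case-range extension (invented functions `f' and `g'):  */
#if 0
extern void f (), g ();

static void
demo (i)
     int i;
{
  switch (i)
    {
    case 10 ... 20:
      f ();
      break;
    case 21 ... 30:		/* once an emitted parent test has already
				   excluded values <= 20, 21 - 1 == 20
				   matches that parent's high bound, so
				   node_has_low_bound returns 1 and no
				   lower-bound compare is emitted here */
      g ();
      break;
    }
}
#endif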
/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
	  && node_has_high_bound (node, index_type));
}
/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}
/* Emit step-by-step code to select a case for the value of INDEX.
   The decision tree generated here follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out-of-bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */
static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));
  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       expand_expr (node->high, NULL_RTX,
						    VOIDmode, 0),
				       GT, NULL_RTX, mode, unsignedp, 0,
				       label_rtx (node->right->code_label));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       expand_expr (node->high, NULL_RTX,
						    VOIDmode, 0),
				       LT, NULL_RTX, mode, unsignedp, 0,
				       label_rtx (node->left->code_label));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_and_jump_insns (index,
				       expand_expr (node->high, NULL_RTX,
						    VOIDmode, 0),
				       GT, NULL_RTX, mode, unsignedp, 0,
				       label_rtx (test_label));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      emit_jump_if_reachable (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}
      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue a conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if it would avoid only one
	     right child; it costs too much space to save so little time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index,
					   expand_expr (node->high, NULL_RTX,
							VOIDmode, 0),
					   LT, NULL_RTX, mode, unsignedp, 0,
					   default_label);
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->right->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->right->code_label), unsignedp);
	}
      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
	 of the condition here, but they didn't work
	 and I don't understand what the idea was.  -- rms.  */
	  /* If our "most probable entry" is less probable
	     than the default label, emit a jump to
	     the default label using condition codes
	     already lying around.  With no right branch,
	     a branch-greater-than will get us to the default
	     label correctly.  */
	  if (use_cost_table
	      && COST_TABLE (TREE_INT_CST_LOW (node->high)) < 12)
	    ;
#endif /* 0 */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index, expand_expr (node->high,
							       NULL_RTX,
							       VOIDmode, 0),
					   GT, NULL_RTX, mode, unsignedp, 0,
					   default_label);
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers greater than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->left->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
							 VOIDmode, 0),
				     GT, NULL_RTX, mode, unsignedp, 0,
				     label_rtx (node->right->code_label));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_cmp_and_jump_insns (index,
				       expand_expr (node->high, NULL_RTX,
						    VOIDmode, 0),
				       GT, NULL_RTX, mode, unsignedp, 0,
				       label_rtx (test_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
						       VOIDmode, 0),
				   GE, NULL_RTX, mode, unsignedp, 0,
				   label_rtx (node->code_label));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump_if_reachable (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}
      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       expand_expr (node->low, NULL_RTX,
						    VOIDmode, 0),
				       LT, NULL_RTX, mode, unsignedp, 0,
				       default_label);
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
						       VOIDmode, 0),
				   LE, NULL_RTX, mode, unsignedp, 0,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}
      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       expand_expr (node->high, NULL_RTX,
						    VOIDmode, 0),
				       GT, NULL_RTX, mode, unsignedp, 0,
				       default_label);
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   expand_expr (node->low, NULL_RTX,
						VOIDmode, 0),
				   GE, NULL_RTX, mode, unsignedp, 0,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}
      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */

	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       expand_expr (node->high, NULL_RTX,
						    VOIDmode, 0),
				       GT, NULL_RTX, mode, unsignedp, 0,
				       default_label);
	    }

	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       expand_expr (node->low, NULL_RTX,
						    VOIDmode, 0),
				       LT, NULL_RTX, mode, unsignedp, 0,
				       default_label);
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}
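/* Illustration only: the sequence emitted for a three-case switch whose
   balanced tree has 2 at the root, 1 on the left and 3 on the right,
   with neither child bounded.  Standalone sketch (invented function,
   return values standing in for jump targets):  */
#if 0
static int
demo_dispatch (i)
     int i;
{
  if (i == 2)			/* do_jump_if_equal at the root */
    return 2;
  if (i > 2)			/* distinguish the two sides */
    goto test_right;
  if (i == 1)			/* left subtree */
    return 1;
  return 0;			/* emit_jump_if_reachable -> default */
 test_right:
  if (i == 3)			/* right subtree */
    return 3;
  return 0;			/* fall-through guard emitted by
				   expand_end_case -> default */
}
#endif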