[official-gcc.git] / gcc / stmt.c
1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
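/* Illustrative sketch (not part of the original file): a front end
   expanding the C statement `if (c) s;' would make roughly these
   calls, using entry points defined in this file:

       expand_start_cond (c, 0);    ... emit the test, push nesting
       expand_expr_stmt (s);        ... expand the then-clause
       expand_end_cond ();          ... emit the end-of-if label, pop

   With an else-clause, `expand_start_else' would be called between
   the two clauses.  */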
36 #include "config.h"
37 #include "system.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "tm_p.h"
42 #include "flags.h"
43 #include "except.h"
44 #include "function.h"
45 #include "insn-config.h"
46 #include "expr.h"
47 #include "libfuncs.h"
48 #include "hard-reg-set.h"
49 #include "obstack.h"
50 #include "loop.h"
51 #include "recog.h"
52 #include "machmode.h"
53 #include "toplev.h"
54 #include "output.h"
55 #include "ggc.h"
57 #define obstack_chunk_alloc xmalloc
58 #define obstack_chunk_free free
59 struct obstack stmt_obstack;
61 /* Assume that case vectors are not pc-relative. */
62 #ifndef CASE_VECTOR_PC_RELATIVE
63 #define CASE_VECTOR_PC_RELATIVE 0
64 #endif
66 /* Functions and data structures for expanding case statements. */
68 /* Case label structure, used to hold info on labels within case
69 statements. We handle "range" labels; for a single-value label
70 as in C, the high and low limits are the same.
72 An AVL tree of case nodes is initially created, and later transformed
73 to a list linked via the RIGHT fields in the nodes. Nodes with
74 higher case values are later in the list.
76 Switch statements can be output in one of two forms. A branch table
77 is used if there are more than a few labels and the labels are dense
78 within the range between the smallest and largest case value. If a
79 branch table is used, no further manipulations are done with the case
80 node chain.
82 The alternative to the use of a branch table is to generate a series
83 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
84 and PARENT fields to hold a binary tree. Initially the tree is
85 totally unbalanced, with everything on the right. We balance the tree
86 with nodes on the left having lower case values than the parent
87 and nodes on the right having higher values. We then output the tree
88 in order. */
90 struct case_node
92 struct case_node *left; /* Left son in binary tree */
93 struct case_node *right; /* Right son in binary tree; also node chain */
94 struct case_node *parent; /* Parent of node in binary tree */
95 tree low; /* Lowest index value for this label */
96 tree high; /* Highest index value for this label */
97 tree code_label; /* Label to jump to when node matches */
98 int balance;
101 typedef struct case_node case_node;
102 typedef struct case_node *case_node_ptr;
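#if 0
/* Illustrative sketch, not part of the original file: an in-order
   walk of a balanced case_node tree visits labels in increasing
   case-value order, per the invariant described above.  The function
   is hypothetical; emit_case_nodes below does the real emission.  */
static void
example_walk_case_nodes (case_node_ptr n)
{
  if (n == 0)
    return;
  example_walk_case_nodes (n->left);   /* lower case values first */
  /* ... dispatch for n->low .. n->high to n->code_label here ... */
  example_walk_case_nodes (n->right);  /* then higher case values */
}
#endif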
104 /* These are used by estimate_case_costs and balance_case_nodes. */
106 /* This must be a signed type, and non-ANSI compilers lack signed char. */
107 static short cost_table_[129];
108 static int use_cost_table;
109 static int cost_table_initialized;
111 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
112 is unsigned. */
113 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT)((I) + 1)]
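/* For example, COST_TABLE (-1) expands to cost_table_[0] and
   COST_TABLE (127) to cost_table_[128], so the signed range -1..127
   maps onto the 129-element array without signed/unsigned trouble.  */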
115 /* Stack of control and binding constructs we are currently inside.
117 These constructs begin when you call `expand_start_WHATEVER'
118 and end when you call `expand_end_WHATEVER'. This stack records
119 info about how the construct began that tells the end-function
120 what to do. It also may provide information about the construct
121 to alter the behavior of other constructs within the body.
122 For example, they may affect the behavior of C `break' and `continue'.
124 Each construct gets one `struct nesting' object.
125 All of these objects are chained through the `all' field.
126 `nesting_stack' points to the first object (innermost construct).
127 The position of an entry on `nesting_stack' is in its `depth' field.
129 Each type of construct has its own individual stack.
130 For example, loops have `loop_stack'. Each object points to the
131 next object of the same type through the `next' field.
133 Some constructs are visible to `break' exit-statements and others
134 are not. Which constructs are visible depends on the language.
135 Therefore, the data structure allows each construct to be visible
136 or not, according to the args given when the construct is started.
137 The construct is visible if the `exit_label' field is non-null.
138 In that case, the value should be a CODE_LABEL rtx. */
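/* For example, in

       while (a) { if (b) break; }

   expanding the `while' pushes one nesting object on loop_stack and
   expanding the `if' pushes one on cond_stack; both are also chained
   on nesting_stack, with the cond entry innermost.  The `break' is
   expanded as a jump to the loop's exit_label.  (Illustrative only;
   the details live in the expand_* functions.)  */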
140 struct nesting
142 struct nesting *all;
143 struct nesting *next;
144 int depth;
145 rtx exit_label;
146 union
148 /* For conds (if-then and if-then-else statements). */
149 struct
151 /* Label for the end of the if construct.
152 There is none if EXITFLAG was not set
153 and no `else' has been seen yet. */
154 rtx endif_label;
155 /* Label for the end of this alternative.
156 This may be the end of the if or the next else/elseif. */
157 rtx next_label;
158 } cond;
159 /* For loops. */
160 struct
162 /* Label at the top of the loop; place to loop back to. */
163 rtx start_label;
164 /* Label at the end of the whole construct. */
165 rtx end_label;
166 /* Label before a jump that branches to the end of the whole
167 construct. This is where destructors go if any. */
168 rtx alt_end_label;
169 /* Label for `continue' statement to jump to;
170 this is in front of the stepper of the loop. */
171 rtx continue_label;
172 } loop;
173 /* For variable binding contours. */
174 struct
176 /* Sequence number of this binding contour within the function,
177 in order of entry. */
178 int block_start_count;
179 /* Nonzero => value to restore stack to on exit. */
180 rtx stack_level;
181 /* The NOTE that starts this contour.
182 Used by expand_goto to check whether the destination
183 is within each contour or not. */
184 rtx first_insn;
185 /* Innermost containing binding contour that has a stack level. */
186 struct nesting *innermost_stack_block;
187 /* List of cleanups to be run on exit from this contour.
188 This is a list of expressions to be evaluated.
189 The TREE_PURPOSE of each link is the ..._DECL node
190 which the cleanup pertains to. */
191 tree cleanups;
192 /* List of cleanup-lists of blocks containing this block,
193 as they were at the locus where this block appears.
194 There is an element for each containing block,
195 ordered innermost containing block first.
196 The tail of this list can be 0,
197 if all remaining elements would be empty lists.
198 The element's TREE_VALUE is the cleanup-list of that block,
199 which may be null. */
200 tree outer_cleanups;
201 /* Chain of labels defined inside this binding contour.
202 For contours that have stack levels or cleanups. */
203 struct label_chain *label_chain;
204 /* Number of function calls seen, as of start of this block. */
205 int n_function_calls;
206 /* Nonzero if this is associated with an EH region. */
207 int exception_region;
208 /* The saved target_temp_slot_level from our outer block.
209 We may reset target_temp_slot_level to be the level of
210 this block, if that is done, target_temp_slot_level
211 reverts to the saved target_temp_slot_level at the very
212 end of the block. */
213 int block_target_temp_slot_level;
214 /* True if we are currently emitting insns in an area of
215 output code that is controlled by a conditional
216 expression. This is used by the cleanup handling code to
217 generate conditional cleanup actions. */
218 int conditional_code;
219 /* A place to move the start of the exception region for any
220 of the conditional cleanups, must be at the end or after
221 the start of the last unconditional cleanup, and before any
222 conditional branch points. */
223 rtx last_unconditional_cleanup;
224 /* When in a conditional context, this is the specific
225 cleanup list associated with last_unconditional_cleanup,
226 where we place the conditionalized cleanups. */
227 tree *cleanup_ptr;
228 } block;
229 /* For switch (C) or case (Pascal) statements,
230 and also for dummies (see `expand_start_case_dummy'). */
231 struct
233 /* The insn after which the case dispatch should finally
234 be emitted. Zero for a dummy. */
235 rtx start;
236 /* A list of case labels; it is first built as an AVL tree.
237 During expand_end_case, this is converted to a list, and may be
238 rearranged into a nearly balanced binary tree. */
239 struct case_node *case_list;
240 /* Label to jump to if no case matches. */
241 tree default_label;
242 /* The expression to be dispatched on. */
243 tree index_expr;
244 /* Type that INDEX_EXPR should be converted to. */
245 tree nominal_type;
246 /* Name of this kind of statement, for warnings. */
247 const char *printname;
248 /* Used to save no_line_numbers till we see the first case label.
249 We set this to -1 when we see the first case label in this
250 case statement. */
251 int line_number_status;
252 } case_stmt;
253 } data;
256 /* Allocate and return a new `struct nesting'. */
258 #define ALLOC_NESTING() \
259 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
261 /* Pop the nesting stack element by element until we pop off
262 the element which is at the top of STACK.
263 Update all the other stacks, popping off elements from them
264 as we pop them from nesting_stack. */
266 #define POPSTACK(STACK) \
267 do { struct nesting *target = STACK; \
268 struct nesting *this; \
269 do { this = nesting_stack; \
270 if (loop_stack == this) \
271 loop_stack = loop_stack->next; \
272 if (cond_stack == this) \
273 cond_stack = cond_stack->next; \
274 if (block_stack == this) \
275 block_stack = block_stack->next; \
276 if (stack_block_stack == this) \
277 stack_block_stack = stack_block_stack->next; \
278 if (case_stack == this) \
279 case_stack = case_stack->next; \
280 nesting_depth = nesting_stack->depth - 1; \
281 nesting_stack = this->all; \
282 obstack_free (&stmt_obstack, this); } \
283 while (this != target); } while (0)
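/* Illustrative only: an end-function such as expand_end_cond calls
   POPSTACK (cond_stack) with its own entry on top of cond_stack;
   everything pushed above that entry on nesting_stack is popped (and
   freed back to stmt_obstack) first, then the entry itself.  */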
285 /* In some cases it is impossible to generate code for a forward goto
286 until the label definition is seen. This happens when it may be necessary
287 for the goto to reset the stack pointer: we don't yet know how to do that.
288 So expand_goto puts an entry on this fixup list.
289 Each time a binding contour that resets the stack is exited,
290 we check each fixup.
291 If the target label has now been defined, we can insert the proper code. */
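/* For example, with a variable-sized object:

       {
         char buf[n];
         if (g (buf))
           goto done;     ... `done' not yet defined; fixup recorded
       }
       done:;             ... fixup finalized once the label appears

   the jump must restore the stack level saved on entry to the block
   holding BUF.  (Illustrative only.)  */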
293 struct goto_fixup
295 /* Points to following fixup. */
296 struct goto_fixup *next;
297 /* Points to the insn before the jump insn.
298 If more code must be inserted, it goes after this insn. */
299 rtx before_jump;
300 /* The LABEL_DECL that this jump is jumping to, or 0
301 for break, continue or return. */
302 tree target;
303 /* The BLOCK for the place where this goto was found. */
304 tree context;
305 /* The CODE_LABEL rtx that this is jumping to. */
306 rtx target_rtl;
307 /* Number of binding contours started in current function
308 before the label reference. */
309 int block_start_count;
310 /* The outermost stack level that should be restored for this jump.
311 Each time a binding contour that resets the stack is exited,
312 if the target label is *not* yet defined, this slot is updated. */
313 rtx stack_level;
314 /* List of lists of cleanup expressions to be run by this goto.
315 There is one element for each block that this goto is within.
316 The tail of this list can be 0,
317 if all remaining elements would be empty.
318 The TREE_VALUE contains the cleanup list of that block as of the
319 time this goto was seen.
320 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
321 tree cleanup_list_list;
324 /* Within any binding contour that must restore a stack level,
325 all labels are recorded with a chain of these structures. */
327 struct label_chain
329 /* Points to following fixup. */
330 struct label_chain *next;
331 tree label;
334 struct stmt_status
336 /* Chain of all pending binding contours. */
337 struct nesting *x_block_stack;
339 /* If any new stacks are added here, add them to POPSTACKS too. */
341 /* Chain of all pending binding contours that restore stack levels
342 or have cleanups. */
343 struct nesting *x_stack_block_stack;
345 /* Chain of all pending conditional statements. */
346 struct nesting *x_cond_stack;
348 /* Chain of all pending loops. */
349 struct nesting *x_loop_stack;
351 /* Chain of all pending case or switch statements. */
352 struct nesting *x_case_stack;
354 /* Separate chain including all of the above,
355 chained through the `all' field. */
356 struct nesting *x_nesting_stack;
358 /* Number of entries on nesting_stack now. */
359 int x_nesting_depth;
361 /* Number of binding contours started so far in this function. */
362 int x_block_start_count;
364 /* Each time we expand an expression-statement,
365 record the expr's type and its RTL value here. */
366 tree x_last_expr_type;
367 rtx x_last_expr_value;
369 /* Nonzero if within a ({...}) grouping, in which case we must
370 always compute a value for each expr-stmt in case it is the last one. */
371 int x_expr_stmts_for_value;
373 /* Filename and line number of last line-number note,
374 whether we actually emitted it or not. */
375 const char *x_emit_filename;
376 int x_emit_lineno;
378 struct goto_fixup *x_goto_fixup_chain;
381 #define block_stack (cfun->stmt->x_block_stack)
382 #define stack_block_stack (cfun->stmt->x_stack_block_stack)
383 #define cond_stack (cfun->stmt->x_cond_stack)
384 #define loop_stack (cfun->stmt->x_loop_stack)
385 #define case_stack (cfun->stmt->x_case_stack)
386 #define nesting_stack (cfun->stmt->x_nesting_stack)
387 #define nesting_depth (cfun->stmt->x_nesting_depth)
388 #define current_block_start_count (cfun->stmt->x_block_start_count)
389 #define last_expr_type (cfun->stmt->x_last_expr_type)
390 #define last_expr_value (cfun->stmt->x_last_expr_value)
391 #define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
392 #define emit_filename (cfun->stmt->x_emit_filename)
393 #define emit_lineno (cfun->stmt->x_emit_lineno)
394 #define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
396 /* Non-zero if we are using EH to handle cleanups. */
397 static int using_eh_for_cleanups_p = 0;
399 static int n_occurrences PARAMS ((int, const char *));
400 static void expand_goto_internal PARAMS ((tree, rtx, rtx));
401 static int expand_fixup PARAMS ((tree, rtx, rtx));
402 static rtx expand_nl_handler_label PARAMS ((rtx, rtx));
403 static void expand_nl_goto_receiver PARAMS ((void));
404 static void expand_nl_goto_receivers PARAMS ((struct nesting *));
405 static void fixup_gotos PARAMS ((struct nesting *, rtx, tree,
406 rtx, int));
407 static bool check_operand_nalternatives PARAMS ((tree, tree));
408 static bool check_unique_operand_names PARAMS ((tree, tree));
409 static tree resolve_operand_names PARAMS ((tree, tree, tree,
410 const char **));
411 static char *resolve_operand_name_1 PARAMS ((char *, tree, tree));
412 static void expand_null_return_1 PARAMS ((rtx));
413 static void expand_value_return PARAMS ((rtx));
414 static int tail_recursion_args PARAMS ((tree, tree));
415 static void expand_cleanups PARAMS ((tree, tree, int, int));
416 static void check_seenlabel PARAMS ((void));
417 static void do_jump_if_equal PARAMS ((rtx, rtx, rtx, int));
418 static int estimate_case_costs PARAMS ((case_node_ptr));
419 static void group_case_nodes PARAMS ((case_node_ptr));
420 static void balance_case_nodes PARAMS ((case_node_ptr *,
421 case_node_ptr));
422 static int node_has_low_bound PARAMS ((case_node_ptr, tree));
423 static int node_has_high_bound PARAMS ((case_node_ptr, tree));
424 static int node_is_bounded PARAMS ((case_node_ptr, tree));
425 static void emit_jump_if_reachable PARAMS ((rtx));
426 static void emit_case_nodes PARAMS ((rtx, case_node_ptr, rtx, tree));
427 static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
428 static void mark_cond_nesting PARAMS ((struct nesting *));
429 static void mark_loop_nesting PARAMS ((struct nesting *));
430 static void mark_block_nesting PARAMS ((struct nesting *));
431 static void mark_case_nesting PARAMS ((struct nesting *));
432 static void mark_case_node PARAMS ((struct case_node *));
433 static void mark_goto_fixup PARAMS ((struct goto_fixup *));
434 static void free_case_nodes PARAMS ((case_node_ptr));
436 void
437 using_eh_for_cleanups ()
439 using_eh_for_cleanups_p = 1;
442 /* Mark N (known to be a cond-nesting) for GC. */
444 static void
445 mark_cond_nesting (n)
446 struct nesting *n;
448 while (n)
450 ggc_mark_rtx (n->exit_label);
451 ggc_mark_rtx (n->data.cond.endif_label);
452 ggc_mark_rtx (n->data.cond.next_label);
454 n = n->next;
458 /* Mark N (known to be a loop-nesting) for GC. */
460 static void
461 mark_loop_nesting (n)
462 struct nesting *n;
465 while (n)
467 ggc_mark_rtx (n->exit_label);
468 ggc_mark_rtx (n->data.loop.start_label);
469 ggc_mark_rtx (n->data.loop.end_label);
470 ggc_mark_rtx (n->data.loop.alt_end_label);
471 ggc_mark_rtx (n->data.loop.continue_label);
473 n = n->next;
477 /* Mark N (known to be a block-nesting) for GC. */
479 static void
480 mark_block_nesting (n)
481 struct nesting *n;
483 while (n)
485 struct label_chain *l;
487 ggc_mark_rtx (n->exit_label);
488 ggc_mark_rtx (n->data.block.stack_level);
489 ggc_mark_rtx (n->data.block.first_insn);
490 ggc_mark_tree (n->data.block.cleanups);
491 ggc_mark_tree (n->data.block.outer_cleanups);
493 for (l = n->data.block.label_chain; l != NULL; l = l->next)
495 ggc_mark (l);
496 ggc_mark_tree (l->label);
499 ggc_mark_rtx (n->data.block.last_unconditional_cleanup);
501 /* ??? cleanup_ptr never points outside the stack, does it? */
503 n = n->next;
507 /* Mark N (known to be a case-nesting) for GC. */
509 static void
510 mark_case_nesting (n)
511 struct nesting *n;
513 while (n)
515 ggc_mark_rtx (n->exit_label);
516 ggc_mark_rtx (n->data.case_stmt.start);
518 ggc_mark_tree (n->data.case_stmt.default_label);
519 ggc_mark_tree (n->data.case_stmt.index_expr);
520 ggc_mark_tree (n->data.case_stmt.nominal_type);
522 mark_case_node (n->data.case_stmt.case_list);
523 n = n->next;
527 /* Mark C for GC. */
529 static void
530 mark_case_node (c)
531 struct case_node *c;
533 if (c != 0)
535 ggc_mark_tree (c->low);
536 ggc_mark_tree (c->high);
537 ggc_mark_tree (c->code_label);
539 mark_case_node (c->right);
540 mark_case_node (c->left);
544 /* Mark G for GC. */
546 static void
547 mark_goto_fixup (g)
548 struct goto_fixup *g;
550 while (g)
552 ggc_mark (g);
553 ggc_mark_rtx (g->before_jump);
554 ggc_mark_tree (g->target);
555 ggc_mark_tree (g->context);
556 ggc_mark_rtx (g->target_rtl);
557 ggc_mark_rtx (g->stack_level);
558 ggc_mark_tree (g->cleanup_list_list);
560 g = g->next;
564 /* Clear out all parts of the state in F that can safely be discarded
565 after the function has been compiled, to let garbage collection
566 reclaim the memory. */
568 void
569 free_stmt_status (f)
570 struct function *f;
572 /* We're about to free the function obstack. If we hold pointers to
573 things allocated there, then we'll try to mark them when we do
574 GC. So, we clear them out here explicitly. */
575 if (f->stmt)
576 free (f->stmt);
577 f->stmt = NULL;
580 /* Mark P for GC. */
582 void
583 mark_stmt_status (p)
584 struct stmt_status *p;
586 if (p == 0)
587 return;
589 mark_block_nesting (p->x_block_stack);
590 mark_cond_nesting (p->x_cond_stack);
591 mark_loop_nesting (p->x_loop_stack);
592 mark_case_nesting (p->x_case_stack);
594 ggc_mark_tree (p->x_last_expr_type);
595 /* last_expr_value is only valid if last_expr_type is nonzero. */
596 if (p->x_last_expr_type)
597 ggc_mark_rtx (p->x_last_expr_value);
599 mark_goto_fixup (p->x_goto_fixup_chain);
602 void
603 init_stmt ()
605 gcc_obstack_init (&stmt_obstack);
608 void
609 init_stmt_for_function ()
611 cfun->stmt = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));
613 /* We are not currently within any block, conditional, loop or case. */
614 block_stack = 0;
615 stack_block_stack = 0;
616 loop_stack = 0;
617 case_stack = 0;
618 cond_stack = 0;
619 nesting_stack = 0;
620 nesting_depth = 0;
622 current_block_start_count = 0;
624 /* No gotos have been expanded yet. */
625 goto_fixup_chain = 0;
627 /* We are not processing a ({...}) grouping. */
628 expr_stmts_for_value = 0;
629 last_expr_type = 0;
630 last_expr_value = NULL_RTX;
633 /* Return nonzero if anything is pushed on the loop, condition, or case
634 stack. */
635 int
636 in_control_zone_p ()
638 return cond_stack || loop_stack || case_stack;
641 /* Record the current file and line. Called from emit_line_note. */
642 void
643 set_file_and_line_for_stmt (file, line)
644 const char *file;
645 int line;
647 /* If we're outputting an inline function, and we add a line note,
648 there may be no CFUN->STMT information. So, there's no need to
649 update it. */
650 if (cfun->stmt)
652 emit_filename = file;
653 emit_lineno = line;
657 /* Emit a no-op instruction. */
659 void
660 emit_nop ()
662 rtx last_insn;
664 last_insn = get_last_insn ();
665 if (!optimize
666 && (GET_CODE (last_insn) == CODE_LABEL
667 || (GET_CODE (last_insn) == NOTE
668 && prev_real_insn (last_insn) == 0)))
669 emit_insn (gen_nop ());
672 /* Return the rtx-label that corresponds to a LABEL_DECL,
673 creating it if necessary. */
675 rtx
676 label_rtx (label)
677 tree label;
679 if (TREE_CODE (label) != LABEL_DECL)
680 abort ();
682 if (!DECL_RTL_SET_P (label))
683 SET_DECL_RTL (label, gen_label_rtx ());
685 return DECL_RTL (label);
689 /* Add an unconditional jump to LABEL as the next sequential instruction. */
691 void
692 emit_jump (label)
693 rtx label;
695 do_pending_stack_adjust ();
696 emit_jump_insn (gen_jump (label));
697 emit_barrier ();
700 /* Emit code to jump to the address
701 specified by the pointer expression EXP. */
703 void
704 expand_computed_goto (exp)
705 tree exp;
707 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
709 #ifdef POINTERS_EXTEND_UNSIGNED
710 if (GET_MODE (x) != Pmode)
711 x = convert_memory_address (Pmode, x);
712 #endif
714 emit_queue ();
715 /* Be sure the function is executable. */
716 if (current_function_check_memory_usage)
717 emit_library_call (chkr_check_exec_libfunc, LCT_CONST_MAKE_BLOCK,
718 VOIDmode, 1, x, ptr_mode);
720 do_pending_stack_adjust ();
721 emit_indirect_jump (x);
723 current_function_has_computed_jump = 1;
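/* For example, GNU C's computed goto `goto *p;' reaches
   expand_computed_goto above with EXP being the pointer expression P
   (illustrative; the front end builds that tree).  */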
726 /* Handle goto statements and the labels that they can go to. */
728 /* Specify the location in the RTL code of a label LABEL,
729 which is a LABEL_DECL tree node.
731 This is used for the kind of label that the user can jump to with a
732 goto statement, and for alternatives of a switch or case statement.
733 RTL labels generated for loops and conditionals don't go through here;
734 they are generated directly at the RTL level, by other functions below.
736 Note that this has nothing to do with defining label *names*.
737 Languages vary in how they do that and what that even means. */
739 void
740 expand_label (label)
741 tree label;
743 struct label_chain *p;
745 do_pending_stack_adjust ();
746 emit_label (label_rtx (label));
747 if (DECL_NAME (label))
748 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
750 if (stack_block_stack != 0)
752 p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
753 p->next = stack_block_stack->data.block.label_chain;
754 stack_block_stack->data.block.label_chain = p;
755 p->label = label;
759 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
760 from nested functions. */
762 void
763 declare_nonlocal_label (label)
764 tree label;
766 rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
768 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
769 LABEL_PRESERVE_P (label_rtx (label)) = 1;
770 if (nonlocal_goto_handler_slots == 0)
772 emit_stack_save (SAVE_NONLOCAL,
773 &nonlocal_goto_stack_level,
774 PREV_INSN (tail_recursion_reentry));
776 nonlocal_goto_handler_slots
777 = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
780 /* Generate RTL code for a `goto' statement with target label LABEL.
781 LABEL should be a LABEL_DECL tree node that was or will later be
782 defined with `expand_label'. */
784 void
785 expand_goto (label)
786 tree label;
788 tree context;
790 /* Check for a nonlocal goto to a containing function. */
791 context = decl_function_context (label);
792 if (context != 0 && context != current_function_decl)
794 struct function *p = find_function_data (context);
795 rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
796 rtx handler_slot, static_chain, save_area, insn;
797 tree link;
799 /* Find the corresponding handler slot for this label. */
800 handler_slot = p->x_nonlocal_goto_handler_slots;
801 for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
802 link = TREE_CHAIN (link))
803 handler_slot = XEXP (handler_slot, 1);
804 handler_slot = XEXP (handler_slot, 0);
806 p->has_nonlocal_label = 1;
807 current_function_has_nonlocal_goto = 1;
808 LABEL_REF_NONLOCAL_P (label_ref) = 1;
810 /* Copy the rtl for the slots so that they won't be shared in
811 case the virtual stack vars register gets instantiated differently
812 in the parent than in the child. */
814 static_chain = copy_to_reg (lookup_static_chain (label));
816 /* Get addr of containing function's current nonlocal goto handler,
817 which will do any cleanups and then jump to the label. */
818 handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
819 virtual_stack_vars_rtx,
820 static_chain));
822 /* Get addr of containing function's nonlocal save area. */
823 save_area = p->x_nonlocal_goto_stack_level;
824 if (save_area)
825 save_area = replace_rtx (copy_rtx (save_area),
826 virtual_stack_vars_rtx, static_chain);
828 #if HAVE_nonlocal_goto
829 if (HAVE_nonlocal_goto)
830 emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
831 save_area, label_ref));
832 else
833 #endif
835 /* Restore frame pointer for containing function.
836 This sets the actual hard register used for the frame pointer
837 to the location of the function's incoming static chain info.
838 The non-local goto handler will then adjust it to contain the
839 proper value and reload the argument pointer, if needed. */
840 emit_move_insn (hard_frame_pointer_rtx, static_chain);
841 emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);
843 /* USE of hard_frame_pointer_rtx added for consistency;
844 not clear if really needed. */
845 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
846 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
847 emit_indirect_jump (handler_slot);
850 /* Search backwards to the jump insn and mark it as a
851 non-local goto. */
852 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
854 if (GET_CODE (insn) == JUMP_INSN)
856 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
857 const0_rtx, REG_NOTES (insn));
858 break;
860 else if (GET_CODE (insn) == CALL_INSN)
861 break;
864 else
865 expand_goto_internal (label, label_rtx (label), NULL_RTX);
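/* Illustrative only: the nonlocal case above arises from GNU C nested
   functions, e.g.

       void f () { __label__ l; void g () { goto l; } g (); l:; }

   where the `goto l' inside G must restore F's frame and stack level
   before jumping.  */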
868 /* Generate RTL code for a `goto' statement with target label BODY.
869 LABEL should be a CODE_LABEL.
870 LAST_INSN, if non-0, is the rtx we should consider as the last
871 insn emitted (for the purposes of cleaning up a return). */
873 static void
874 expand_goto_internal (body, label, last_insn)
875 tree body;
876 rtx label;
877 rtx last_insn;
879 struct nesting *block;
880 rtx stack_level = 0;
882 if (GET_CODE (label) != CODE_LABEL)
883 abort ();
885 /* If label has already been defined, we can tell now
886 whether and how we must alter the stack level. */
888 if (PREV_INSN (label) != 0)
890 /* Find the innermost pending block that contains the label.
891 (Check containment by comparing insn-uids.)
892 Then restore the outermost stack level within that block,
893 and do cleanups of all blocks contained in it. */
894 for (block = block_stack; block; block = block->next)
896 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
897 break;
898 if (block->data.block.stack_level != 0)
899 stack_level = block->data.block.stack_level;
900 /* Execute the cleanups for blocks we are exiting. */
901 if (block->data.block.cleanups != 0)
903 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
904 do_pending_stack_adjust ();
908 if (stack_level)
910 /* Ensure stack adjust isn't done by emit_jump, as this
911 would clobber the stack pointer. This one should be
912 deleted as dead by flow. */
913 clear_pending_stack_adjust ();
914 do_pending_stack_adjust ();
916 /* Don't do this adjust if it's to the end label and this function
917 is to return with a depressed stack pointer. */
918 if (label == return_label
919 && (((TREE_CODE (TREE_TYPE (current_function_decl))
920 == FUNCTION_TYPE)
921 && (TYPE_RETURNS_STACK_DEPRESSED
922 (TREE_TYPE (current_function_decl))))))
923 ;
924 else
925 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
928 if (body != 0 && DECL_TOO_LATE (body))
929 error ("jump to `%s' invalidly jumps into binding contour",
930 IDENTIFIER_POINTER (DECL_NAME (body)));
932 /* Label not yet defined: may need to put this goto
933 on the fixup list. */
934 else if (! expand_fixup (body, label, last_insn))
936 /* No fixup needed. Record that the label is the target
937 of at least one goto that has no fixup. */
938 if (body != 0)
939 TREE_ADDRESSABLE (body) = 1;
942 emit_jump (label);
945 /* Generate if necessary a fixup for a goto
946 whose target label in tree structure (if any) is TREE_LABEL
947 and whose target in rtl is RTL_LABEL.
949 If LAST_INSN is nonzero, we pretend that the jump appears
950 after insn LAST_INSN instead of at the current point in the insn stream.
952 The fixup will be used later to insert insns just before the goto.
953 Those insns will restore the stack level as appropriate for the
954 target label, and will (in the case of C++) also invoke any object
955 destructors which have to be invoked when we exit the scopes which
956 are exited by the goto.
958 Value is nonzero if a fixup is made. */
960 static int
961 expand_fixup (tree_label, rtl_label, last_insn)
962 tree tree_label;
963 rtx rtl_label;
964 rtx last_insn;
966 struct nesting *block, *end_block;
968 /* See if we can recognize which block the label will be output in.
969 This is possible in some very common cases.
970 If we succeed, set END_BLOCK to that block.
971 Otherwise, set it to 0. */
973 if (cond_stack
974 && (rtl_label == cond_stack->data.cond.endif_label
975 || rtl_label == cond_stack->data.cond.next_label))
976 end_block = cond_stack;
977 /* If we are in a loop, recognize certain labels which
978 are likely targets. This reduces the number of fixups
979 we need to create. */
980 else if (loop_stack
981 && (rtl_label == loop_stack->data.loop.start_label
982 || rtl_label == loop_stack->data.loop.end_label
983 || rtl_label == loop_stack->data.loop.continue_label))
984 end_block = loop_stack;
985 else
986 end_block = 0;
988 /* Now set END_BLOCK to the binding level to which we will return. */
990 if (end_block)
992 struct nesting *next_block = end_block->all;
993 block = block_stack;
995 /* First see if the END_BLOCK is inside the innermost binding level.
996 If so, then no cleanups or stack levels are relevant. */
997 while (next_block && next_block != block)
998 next_block = next_block->all;
1000 if (next_block)
1001 return 0;
1003 /* Otherwise, set END_BLOCK to the innermost binding level
1004 which is outside the relevant control-structure nesting. */
1005 next_block = block_stack->next;
1006 for (block = block_stack; block != end_block; block = block->all)
1007 if (block == next_block)
1008 next_block = next_block->next;
1009 end_block = next_block;
1012 /* Does any containing block have a stack level or cleanups?
1013 If not, no fixup is needed, and that is the normal case
1014 (the only case, for standard C). */
1015 for (block = block_stack; block != end_block; block = block->next)
1016 if (block->data.block.stack_level != 0
1017 || block->data.block.cleanups != 0)
1018 break;
1020 if (block != end_block)
1022 /* Ok, a fixup is needed. Add a fixup to the list of such. */
1023 struct goto_fixup *fixup
1024 = (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
1025 /* In case an old stack level is restored, make sure that comes
1026 after any pending stack adjust. */
1027 /* ?? If the fixup isn't to come at the present position,
1028 doing the stack adjust here isn't useful. Doing it with our
1029 settings at that location isn't useful either. Let's hope
1030 someone does it! */
1031 if (last_insn == 0)
1032 do_pending_stack_adjust ();
1033 fixup->target = tree_label;
1034 fixup->target_rtl = rtl_label;
1036 /* Create a BLOCK node and a corresponding matched set of
1037 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
1038 this point. The notes will encapsulate any and all fixup
1039 code which we might later insert at this point in the insn
1040 stream. Also, the BLOCK node will be the parent (i.e. the
1041 `SUPERBLOCK') of any other BLOCK nodes which we might create
1042 later on when we are expanding the fixup code.
1044 Note that optimization passes (including expand_end_loop)
1045 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
1046 as a placeholder. */
1049 rtx original_before_jump
1050 = last_insn ? last_insn : get_last_insn ();
1051 rtx start;
1052 rtx end;
1053 tree block;
1055 block = make_node (BLOCK);
1056 TREE_USED (block) = 1;
1058 if (!cfun->x_whole_function_mode_p)
1059 insert_block (block);
1060 else
1062 BLOCK_CHAIN (block)
1063 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
1064 BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
1065 = block;
1068 start_sequence ();
1069 start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
1070 if (cfun->x_whole_function_mode_p)
1071 NOTE_BLOCK (start) = block;
1072 fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
1073 end = emit_note (NULL, NOTE_INSN_BLOCK_END);
1074 if (cfun->x_whole_function_mode_p)
1075 NOTE_BLOCK (end) = block;
1076 fixup->context = block;
1077 end_sequence ();
1078 emit_insns_after (start, original_before_jump);
1081 fixup->block_start_count = current_block_start_count;
1082 fixup->stack_level = 0;
1083 fixup->cleanup_list_list
1084 = ((block->data.block.outer_cleanups
1085 || block->data.block.cleanups)
1086 ? tree_cons (NULL_TREE, block->data.block.cleanups,
1087 block->data.block.outer_cleanups)
1088 : 0);
1089 fixup->next = goto_fixup_chain;
1090 goto_fixup_chain = fixup;
1093 return block != 0;
1096 /* Expand any needed fixups in the outermost binding level of the
1097 function. FIRST_INSN is the first insn in the function. */
1099 void
1100 expand_fixups (first_insn)
1101 rtx first_insn;
1103 fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
1106 /* When exiting a binding contour, process all pending gotos requiring fixups.
1107 THISBLOCK is the structure that describes the block being exited.
1108 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1109 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1110 FIRST_INSN is the insn that began this contour.
1112 Gotos that jump out of this contour must restore the
1113 stack level and do the cleanups before actually jumping.
1115 DONT_JUMP_IN nonzero means report an error if there is a jump into
1116 this contour from before the beginning of the contour.
1117 This is also done if STACK_LEVEL is nonzero. */
1119 static void
1120 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1121 struct nesting *thisblock;
1122 rtx stack_level;
1123 tree cleanup_list;
1124 rtx first_insn;
1125 int dont_jump_in;
1127 struct goto_fixup *f, *prev;
1129 /* F is the fixup we are considering; PREV is the previous one. */
1130 /* We run this loop in two passes so that cleanups of exited blocks
1131 are run first, and blocks that are exited are marked so
1132 afterwards. */
1134 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1136 /* Test for a fixup that is inactive because it is already handled. */
1137 if (f->before_jump == 0)
1139 /* Delete inactive fixup from the chain, if that is easy to do. */
1140 if (prev != 0)
1141 prev->next = f->next;
1143 /* Has this fixup's target label been defined?
1144 If so, we can finalize it. */
1145 else if (PREV_INSN (f->target_rtl) != 0)
1147 rtx cleanup_insns;
1149 /* If this fixup jumped into this contour from before the beginning
1150 of this contour, report an error. This code used to use
1151 the first non-label insn after f->target_rtl, but that's
1152 wrong since such can be added, by things like put_var_into_stack
1153 and have INSN_UIDs that are out of the range of the block. */
1154 /* ??? Bug: this does not detect jumping in through intermediate
1155 blocks that have stack levels or cleanups.
1156 It detects only a problem with the innermost block
1157 around the label. */
1158 if (f->target != 0
1159 && (dont_jump_in || stack_level || cleanup_list)
1160 && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
1161 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
1162 && ! DECL_ERROR_ISSUED (f->target))
1164 error_with_decl (f->target,
1165 "label `%s' used before containing binding contour");
1166 /* Prevent multiple errors for one label. */
1167 DECL_ERROR_ISSUED (f->target) = 1;
1170 /* We will expand the cleanups into a sequence of their own and
1171 then later on we will attach this new sequence to the insn
1172 stream just ahead of the actual jump insn. */
1174 start_sequence ();
1176 /* Temporarily restore the lexical context where we will
1177 logically be inserting the fixup code. We do this for the
1178 sake of getting the debugging information right. */
1180 pushlevel (0);
1181 set_block (f->context);
1183 /* Expand the cleanups for blocks this jump exits. */
1184 if (f->cleanup_list_list)
1186 tree lists;
1187 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1188 /* Marked elements correspond to blocks that have been closed.
1189 Do their cleanups. */
1190 if (TREE_ADDRESSABLE (lists)
1191 && TREE_VALUE (lists) != 0)
1193 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1194 /* Pop any pushes done in the cleanups,
1195 in case function is about to return. */
1196 do_pending_stack_adjust ();
1200 /* Restore stack level for the biggest contour that this
1201 jump jumps out of. */
1202 if (f->stack_level
1203 && ! (f->target_rtl == return_label
1204 && ((TREE_CODE (TREE_TYPE (current_function_decl))
1205 == FUNCTION_TYPE)
1206 && (TYPE_RETURNS_STACK_DEPRESSED
1207 (TREE_TYPE (current_function_decl))))))
1208 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1210 /* Finish up the sequence containing the insns which implement the
1211 necessary cleanups, and then attach that whole sequence to the
1212 insn stream just ahead of the actual jump insn. Attaching it
1213 at that point ensures that any cleanups which are in fact
1214 implicit C++ object destructions (which must be executed upon
1215 leaving the block) appear (to the debugger) to be taking place
1216 in an area of the generated code where the object(s) being
1217 destructed are still "in scope". */
1219 cleanup_insns = get_insns ();
1220 poplevel (1, 0, 0);
1222 end_sequence ();
1223 emit_insns_after (cleanup_insns, f->before_jump);
1225 f->before_jump = 0;
1229 /* For any still-undefined labels, do the cleanups for this block now.
1230 We must do this now since items in the cleanup list may go out
1231 of scope when the block ends. */
1232 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1233 if (f->before_jump != 0
1234 && PREV_INSN (f->target_rtl) == 0
1235 /* Label has still not appeared. If we are exiting a block with
1236 a stack level to restore, that started before the fixup,
1237 mark this stack level as needing restoration
1238 when the fixup is later finalized. */
1239 && thisblock != 0
1240 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
1241 means the label is undefined. That's erroneous, but possible. */
1242 && (thisblock->data.block.block_start_count
1243 <= f->block_start_count))
1245 tree lists = f->cleanup_list_list;
1246 rtx cleanup_insns;
1248 for (; lists; lists = TREE_CHAIN (lists))
1249 /* If the following elt. corresponds to our containing block
1250 then the elt. must be for this block. */
1251 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1253 start_sequence ();
1254 pushlevel (0);
1255 set_block (f->context);
1256 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1257 do_pending_stack_adjust ();
1258 cleanup_insns = get_insns ();
1259 poplevel (1, 0, 0);
1260 end_sequence ();
1261 if (cleanup_insns != 0)
1262 f->before_jump
1263 = emit_insns_after (cleanup_insns, f->before_jump);
1265 f->cleanup_list_list = TREE_CHAIN (lists);
1268 if (stack_level)
1269 f->stack_level = stack_level;
1273 /* Return the number of times character C occurs in string S. */
1274 static int
1275 n_occurrences (c, s)
1276 int c;
1277 const char *s;
1279 int n = 0;
1280 while (*s)
1281 n += (*s++ == c);
1282 return n;
1285 /* Generate RTL for an asm statement (explicit assembler code).
1286 BODY is a STRING_CST node containing the assembler code text,
1287 or an ADDR_EXPR containing a STRING_CST. */
1289 void
1290 expand_asm (body)
1291 tree body;
1293 if (current_function_check_memory_usage)
1295 error ("`asm' cannot be used in function where memory usage is checked");
1296 return;
1299 if (TREE_CODE (body) == ADDR_EXPR)
1300 body = TREE_OPERAND (body, 0);
1302 emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
1303 TREE_STRING_POINTER (body)));
1304 last_expr_type = 0;
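/* For example, a basic `asm ("nop");' reaches expand_asm above with
   BODY the STRING_CST "nop" and emits a single ASM_INPUT insn.  */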
1307 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
1308 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
1309 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
1310 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
1311 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
1312 constraint allows the use of a register operand. And, *IS_INOUT
1313 will be true if the operand is read-write, i.e., if it is used as
1314 an input as well as an output. If *CONSTRAINT_P is not in
1315 canonical form, it will be made canonical. (Note that `+' will be
1316 replaced with `=' as part of this process.)
1318 Returns TRUE if all went well; FALSE if an error occurred. */
1320 bool
1321 parse_output_constraint (constraint_p,
1322 operand_num,
1323 ninputs,
1324 noutputs,
1325 allows_mem,
1326 allows_reg,
1327 is_inout)
1328 const char **constraint_p;
1329 int operand_num;
1330 int ninputs;
1331 int noutputs;
1332 bool *allows_mem;
1333 bool *allows_reg;
1334 bool *is_inout;
1336 const char *constraint = *constraint_p;
1337 const char *p;
1339 /* Assume the constraint doesn't allow the use of either a register
1340 or memory. */
1341 *allows_mem = false;
1342 *allows_reg = false;
1344 /* Allow the `=' or `+' to not be at the beginning of the string,
1345 since it wasn't explicitly documented that way, and there is a
1346 large body of code that puts it last. Swap the character to
1347 the front, so as not to uglify any place else. */
1348 p = strchr (constraint, '=');
1349 if (!p)
1350 p = strchr (constraint, '+');
1352 /* If the string doesn't contain an `=', issue an error
1353 message. */
1354 if (!p)
1356 error ("output operand constraint lacks `='");
1357 return false;
1360 /* If the constraint begins with `+', then the operand is both read
1361 from and written to. */
1362 *is_inout = (*p == '+');
1364 /* Canonicalize the output constraint so that it begins with `='. */
1365 if (p != constraint || *is_inout)
1367 char *buf;
1368 size_t c_len = strlen (constraint);
1370 if (p != constraint)
1371 warning ("output constraint `%c' for operand %d is not at the beginning",
1372 *p, operand_num);
1374 /* Make a copy of the constraint. */
1375 buf = alloca (c_len + 1);
1376 strcpy (buf, constraint);
1377 /* Swap the first character and the `=' or `+'. */
1378 buf[p - constraint] = buf[0];
1379 /* Make sure the first character is an `='. (Until we do this,
1380 it might be a `+'.) */
1381 buf[0] = '=';
1382 /* Replace the constraint with the canonicalized string. */
1383 *constraint_p = ggc_alloc_string (buf, c_len);
1384 constraint = *constraint_p;
1387 /* Loop through the constraint string. */
1388 for (p = constraint + 1; *p; ++p)
1389 switch (*p)
1391 case '+':
1392 case '=':
1393 error ("operand constraint contains '+' or '=' at illegal position.");
1394 return false;
1396 case '%':
1397 if (operand_num + 1 == ninputs + noutputs)
1399 error ("`%%' constraint used with last operand");
1400 return false;
1402 break;
1404 case 'V': case 'm': case 'o':
1405 *allows_mem = true;
1406 break;
1408 case '?': case '!': case '*': case '&': case '#':
1409 case 'E': case 'F': case 'G': case 'H':
1410 case 's': case 'i': case 'n':
1411 case 'I': case 'J': case 'K': case 'L': case 'M':
1412 case 'N': case 'O': case 'P': case ',':
1413 break;
1415 case '0': case '1': case '2': case '3': case '4':
1416 case '5': case '6': case '7': case '8': case '9':
1417 case '[':
1418 error ("matching constraint not valid in output operand");
1419 return false;
1421 case '<': case '>':
1422 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1423 excepting those that expand_call created. So match memory
1424 and hope. */
1425 *allows_mem = true;
1426 break;
1428 case 'g': case 'X':
1429 *allows_reg = true;
1430 *allows_mem = true;
1431 break;
1433 case 'p': case 'r':
1434 *allows_reg = true;
1435 break;
1437 default:
1438 if (!ISALPHA (*p))
1439 break;
1440 if (REG_CLASS_FROM_LETTER (*p) != NO_REGS)
1441 *allows_reg = true;
1442 #ifdef EXTRA_CONSTRAINT
1443 else
1445 /* Otherwise we can't assume anything about the nature of
1446 the constraint except that it isn't purely registers.
1447 Treat it like "g" and hope for the best. */
1448 *allows_reg = true;
1449 *allows_mem = true;
1451 #endif
1452 break;
1455 return true;
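/* For example, given the constraint "+r" for operand 0, the code
   above canonicalizes it to "=r", sets *IS_INOUT and *ALLOWS_REG, and
   returns true; "=m" instead sets only *ALLOWS_MEM.  (Illustrative of
   the cases handled above.)  */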
1458 /* Generate RTL for an asm statement with arguments.
1459 STRING is the instruction template.
1460 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1461 Each output or input has an expression in the TREE_VALUE and
1462 and a tree list in TREE_PURPOSE which in turn contains a constraint
1463 name in TREE_VALUE (or NULL_TREE) and a constraint string
1464 in TREE_PURPOSE.
1465 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1466 that is clobbered by this insn.
1468 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1469 Some elements of OUTPUTS may be replaced with trees representing temporary
1470 values. The caller should copy those temporary values to the originally
1471 specified lvalues.
1473 VOL nonzero means the insn is volatile; don't optimize it. */
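/* For example, the C statement

       asm volatile ("mov %1, %0" : "=r" (x) : "r" (y));

   arrives here with STRING "mov %1, %0", one element of OUTPUTS whose
   constraint string is "=r", one element of INPUTS with "r", an empty
   CLOBBERS list, and VOL nonzero.  (Illustrative only.)  */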
1475 void
1476 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1477 tree string, outputs, inputs, clobbers;
1478 int vol;
1479 const char *filename;
1480 int line;
1482 rtvec argvec, constraintvec;
1483 rtx body;
1484 int ninputs = list_length (inputs);
1485 int noutputs = list_length (outputs);
1486 int ninout = 0;
1487 int nclobbers;
1488 tree tail;
1489 int i;
1490 /* Vector of RTX's of evaluated output operands. */
1491 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1492 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1493 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1494 enum machine_mode *inout_mode
1495 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1496 const char **constraints
1497 = (const char **) alloca ((noutputs + ninputs) * sizeof (const char *));
1498 /* The insn we have emitted. */
1499 rtx insn;
1500 int old_generating_concat_p = generating_concat_p;
1502 /* An ASM with no outputs needs to be treated as volatile, for now. */
1503 if (noutputs == 0)
1504 vol = 1;
1506 if (current_function_check_memory_usage)
1508 error ("`asm' cannot be used in function where memory usage is checked");
1509 return;
1512 if (! check_operand_nalternatives (outputs, inputs))
1513 return;
1515 if (! check_unique_operand_names (outputs, inputs))
1516 return;
1518 string = resolve_operand_names (string, outputs, inputs, constraints);
1520 #ifdef MD_ASM_CLOBBERS
1521 /* Sometimes we wish to automatically clobber registers across an asm.
1522 Case in point is when the i386 backend moved from cc0 to a hard reg --
1523 maintaining source-level compatibility means automatically clobbering
1524 the flags register. */
1525 MD_ASM_CLOBBERS (clobbers);
1526 #endif
1528 /* Count the number of meaningful clobbered registers, ignoring what
1529 we would ignore later. */
1530 nclobbers = 0;
1531 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1533 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1535 i = decode_reg_name (regname);
1536 if (i >= 0 || i == -4)
1537 ++nclobbers;
1538 else if (i == -2)
1539 error ("unknown register name `%s' in `asm'", regname);
1542 last_expr_type = 0;
1544 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1546 tree val = TREE_VALUE (tail);
1547 tree type = TREE_TYPE (val);
1548 bool is_inout;
1549 bool allows_reg;
1550 bool allows_mem;
1552 /* If there's an erroneous arg, emit no insn. */
1553 if (type == error_mark_node)
1554 return;
1556 /* Make sure constraint has `=' and does not have `+'. Also, see
1557 if it allows any register. Be liberal on the latter test, since
1558 the worst that happens if we get it wrong is we issue an error
1559 message. */
1561 /* Try to parse the output constraint. If that fails, there's
1562 no point in going further. */
1563 if (!parse_output_constraint (&constraints[i],
1564 i,
1565 ninputs,
1566 noutputs,
1567 &allows_mem,
1568 &allows_reg,
1569 &is_inout))
1570 return;
1572 /* If an output operand is not a decl or indirect ref and our constraint
1573 allows a register, make a temporary to act as an intermediate.
1574 Make the asm insn write into that, then our caller will copy it to
1575 the real output operand. Likewise for promoted variables. */
1577 generating_concat_p = 0;
1579 real_output_rtx[i] = NULL_RTX;
1580 if ((TREE_CODE (val) == INDIRECT_REF
1581 && allows_mem)
1582 || (DECL_P (val)
1583 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1584 && ! (GET_CODE (DECL_RTL (val)) == REG
1585 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1586 || ! allows_reg
1587 || is_inout)
1589 if (! allows_reg)
1590 mark_addressable (TREE_VALUE (tail));
1592 output_rtx[i]
1593 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
1594 EXPAND_MEMORY_USE_WO);
1596 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1597 error ("output number %d not directly addressable", i);
1598 if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
1599 || GET_CODE (output_rtx[i]) == CONCAT)
1601 real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1602 output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
1603 if (is_inout)
1604 emit_move_insn (output_rtx[i], real_output_rtx[i]);
1607 else
1609 output_rtx[i] = assign_temp (type, 0, 0, 1);
1610 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1613 generating_concat_p = old_generating_concat_p;
1615 if (is_inout)
1617 inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
1618 inout_opnum[ninout++] = i;
1622 ninputs += ninout;
1623 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1625 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1626 return;
1629 /* Make vectors for the expression-rtx, constraint strings,
1630 and named operands. */
1632 argvec = rtvec_alloc (ninputs);
1633 constraintvec = rtvec_alloc (ninputs);
1635 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1636 : GET_MODE (output_rtx[0])),
1637 TREE_STRING_POINTER (string),
1638 empty_string, 0, argvec, constraintvec,
1639 filename, line);
1641 MEM_VOLATILE_P (body) = vol;
1643 /* Eval the inputs and put them into ARGVEC.
1644 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1646 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1648 int j;
1649 int allows_reg = 0, allows_mem = 0;
1650 const char *constraint, *orig_constraint;
1651 int c_len;
1652 rtx op;
1654 /* If there's an erroneous arg, emit no insn,
1655 because the ASM_INPUT would get VOIDmode
1656 and that could cause a crash in reload. */
1657 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1658 return;
1660 /* ??? Can this happen, and does the error message make any sense? */
1661 if (TREE_PURPOSE (tail) == NULL_TREE)
1663 error ("hard register `%s' listed as input operand to `asm'",
1664 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1665 return;
1668 orig_constraint = constraint = constraints[i + noutputs];
1669 c_len = strlen (constraint);
1671 /* Make sure constraint has neither `=', `+', nor '&'. */
1673 for (j = 0; j < c_len; j++)
1674 switch (constraint[j])
1676 case '+': case '=': case '&':
1677 if (constraint == orig_constraint)
1679 error ("input operand constraint contains `%c'",
1680 constraint[j]);
1681 return;
1683 break;
1685 case '%':
1686 if (constraint == orig_constraint
1687 && i + 1 == ninputs - ninout)
1689 error ("`%%' constraint used with last operand");
1690 return;
1692 break;
1694 case 'V': case 'm': case 'o':
1695 allows_mem = 1;
1696 break;
1698 case '<': case '>':
1699 case '?': case '!': case '*': case '#':
1700 case 'E': case 'F': case 'G': case 'H':
1701 case 's': case 'i': case 'n':
1702 case 'I': case 'J': case 'K': case 'L': case 'M':
1703 case 'N': case 'O': case 'P': case ',':
1704 break;
1706 /* Whether or not a numeric constraint allows a register is
1707 decided by the matching constraint, and so there is no need
1708 to do anything special with them. We must handle them in
1709 the default case, so that we don't unnecessarily force
1710 operands to memory. */
1711 case '0': case '1': case '2': case '3': case '4':
1712 case '5': case '6': case '7': case '8': case '9':
1714 char *end;
1715 unsigned long match;
1717 match = strtoul (constraint + j, &end, 10);
1718 if (match >= (unsigned long) noutputs)
1720 error ("matching constraint references invalid operand number");
1721 return;
1724 /* Try and find the real constraint for this dup. Only do
1725 this if the matching constraint is the only alternative. */
1726 if (*end == '\0'
1727 && (j == 0 || (j == 1 && constraint[0] == '%')))
1729 constraint = constraints[match];
1730 c_len = strlen (constraint);
1731 j = 0;
1732 break;
1734 else
1735 j = end - constraint;
1737 /* Fall through. */
1739 case 'p': case 'r':
1740 allows_reg = 1;
1741 break;
1743 case 'g': case 'X':
1744 allows_reg = 1;
1745 allows_mem = 1;
1746 break;
1748 default:
1749 if (! ISALPHA (constraint[j]))
1751 error ("invalid punctuation `%c' in constraint",
1752 constraint[j]);
1753 return;
1755 if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
1756 allows_reg = 1;
1757 #ifdef EXTRA_CONSTRAINT
1758 else
1760 /* Otherwise we can't assume anything about the nature of
1761 the constraint except that it isn't purely registers.
1762 Treat it like "g" and hope for the best. */
1763 allows_reg = 1;
1764 allows_mem = 1;
1766 #endif
1767 break;
1770 if (! allows_reg && allows_mem)
1771 mark_addressable (TREE_VALUE (tail));
1773 op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
1775 /* Never pass a CONCAT to an ASM. */
1776 generating_concat_p = 0;
1777 if (GET_CODE (op) == CONCAT)
1778 op = force_reg (GET_MODE (op), op);
1780 if (asm_operand_ok (op, constraint) <= 0)
1782 if (allows_reg)
1783 op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
1784 else if (!allows_mem)
1785 warning ("asm operand %d probably doesn't match constraints",
1786 i + noutputs);
1787 else if (CONSTANT_P (op))
1788 op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1789 op);
1790 else if (GET_CODE (op) == REG
1791 || GET_CODE (op) == SUBREG
1792 || GET_CODE (op) == ADDRESSOF
1793 || GET_CODE (op) == CONCAT)
1795 tree type = TREE_TYPE (TREE_VALUE (tail));
1796 tree qual_type = build_qualified_type (type,
1797 (TYPE_QUALS (type)
1798 | TYPE_QUAL_CONST));
1799 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1801 emit_move_insn (memloc, op);
1802 op = memloc;
1805 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1806 /* We won't recognize volatile memory as a valid
1807 memory_operand at this point. Ignore it. */
1809 else if (queued_subexp_p (op))
1811 else
1812 /* ??? Leave this only until we have experience with what
1813 happens in combine and elsewhere when constraints are
1814 not satisfied. */
1815 warning ("asm operand %d probably doesn't match constraints",
1816 i + noutputs);
1818 generating_concat_p = old_generating_concat_p;
1819 ASM_OPERANDS_INPUT (body, i) = op;
1821 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1822 = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1823 orig_constraint);
1826 /* Protect all the operands from the queue now that they have all been
1827 evaluated. */
1829 generating_concat_p = 0;
1831 for (i = 0; i < ninputs - ninout; i++)
1832 ASM_OPERANDS_INPUT (body, i)
1833 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1835 for (i = 0; i < noutputs; i++)
1836 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1838 /* For in-out operands, copy output rtx to input rtx. */
1839 for (i = 0; i < ninout; i++)
1841 int j = inout_opnum[i];
1842 char buffer[16];
1844 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1845 = output_rtx[j];
1847 sprintf (buffer, "%d", j);
1848 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1849 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_alloc_string (buffer, -1));
1852 generating_concat_p = old_generating_concat_p;
1854 /* Now, for each output, construct an rtx
1855 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1856 ARGVEC CONSTRAINTS OPNAMES))
1857 If there is more than one, put them inside a PARALLEL. */
1859 if (noutputs == 1 && nclobbers == 0)
1861 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1862 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1865 else if (noutputs == 0 && nclobbers == 0)
1867 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1868 insn = emit_insn (body);
1871 else
1873 rtx obody = body;
1874 int num = noutputs;
1876 if (num == 0)
1877 num = 1;
1879 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1881 /* For each output operand, store a SET. */
1882 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1884 XVECEXP (body, 0, i)
1885 = gen_rtx_SET (VOIDmode,
1886 output_rtx[i],
1887 gen_rtx_ASM_OPERANDS
1888 (GET_MODE (output_rtx[i]),
1889 TREE_STRING_POINTER (string),
1890 constraints[i], i, argvec, constraintvec,
1891 filename, line));
1893 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1896 /* If there are no outputs (but there are some clobbers)
1897 store the bare ASM_OPERANDS into the PARALLEL. */
1899 if (i == 0)
1900 XVECEXP (body, 0, i++) = obody;
1902 /* Store (clobber REG) for each clobbered register specified. */
1904 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1906 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1907 int j = decode_reg_name (regname);
1909 if (j < 0)
1911 if (j == -3) /* `cc', which is not a register */
1912 continue;
1914 if (j == -4) /* `memory', don't cache memory across asm */
1916 XVECEXP (body, 0, i++)
1917 = gen_rtx_CLOBBER (VOIDmode,
1918 gen_rtx_MEM
1919 (BLKmode,
1920 gen_rtx_SCRATCH (VOIDmode)));
1921 continue;
1924 /* Ignore unknown register, error already signaled. */
1925 continue;
1928 /* Use QImode since that's guaranteed to clobber just one reg. */
1929 XVECEXP (body, 0, i++)
1930 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1933 insn = emit_insn (body);
1936 /* For any outputs that needed reloading into registers, spill them
1937 back to where they belong. */
1938 for (i = 0; i < noutputs; ++i)
1939 if (real_output_rtx[i])
1940 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1942 free_temp_slots ();
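/* For illustration, a sketch of the kind of statement this expands,
   assuming an ix86-style target:

       int lo, hi;
       asm ("mull %3" : "=a" (lo), "=d" (hi) : "0" (a), "r" (b));

   Here there are two outputs, two inputs, and no clobbers; the "0"
   is a matching constraint, which the input loop above resolves to
   output 0's real constraint before validating the operand.  */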
1945 /* A subroutine of expand_asm_operands. Check that all operands have
1946 the same number of alternatives. Return true if so. */
1948 static bool
1949 check_operand_nalternatives (outputs, inputs)
1950 tree outputs, inputs;
1952 if (outputs || inputs)
1954 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1955 int nalternatives
1956 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1957 tree next = inputs;
1959 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1961 error ("too many alternatives in `asm'");
1962 return false;
1965 tmp = outputs;
1966 while (tmp)
1968 const char *constraint
1969 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1971 if (n_occurrences (',', constraint) != nalternatives)
1973 error ("operand constraints for `asm' differ in number of alternatives");
1974 return false;
1977 if (TREE_CHAIN (tmp))
1978 tmp = TREE_CHAIN (tmp);
1979 else
1980 tmp = next, next = 0;
1984 return true;
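/* A sketch of what the check above accepts and rejects, assuming a
   hypothetical machine description with two alternatives:

       asm ("..." : "=r,m" (x) : "r,r" (y));    two alternatives each: OK
       asm ("..." : "=r,m" (x) : "r" (y));      two vs. one: error

   Every operand's constraint string must contain the same number of
   commas, and hence describe the same number of alternatives.  */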
1987 /* A subroutine of expand_asm_operands. Check that all operand names
1988 are unique. Return true if so. We rely on the fact that these names
1989 are identifiers, and so have been canonicalized by get_identifier,
1990 so all we need are pointer comparisons. */
1992 static bool
1993 check_unique_operand_names (outputs, inputs)
1994 tree outputs, inputs;
1996 tree i, j;
1998 for (i = outputs; i ; i = TREE_CHAIN (i))
2000 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
2001 if (! i_name)
2002 continue;
2004 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
2005 if (i_name == TREE_PURPOSE (TREE_PURPOSE (j)))
2006 goto failure;
2009 for (i = inputs; i ; i = TREE_CHAIN (i))
2011 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
2012 if (! i_name)
2013 continue;
2015 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
2016 if (i_name == TREE_PURPOSE (TREE_PURPOSE (j)))
2017 goto failure;
2018 for (j = outputs; j ; j = TREE_CHAIN (j))
2019 if (i_name == TREE_PURPOSE (TREE_PURPOSE (j)))
2020 goto failure;
2023 return true;
2025 failure:
2026 error ("duplicate asm operand name '%s'",
2027 IDENTIFIER_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
2028 return false;
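/* For example, in the hypothetical statement

       asm ("add %[res],%[val]" : [res] "=r" (x) : [val] "r" (y))

   the names `res' and `val' are distinct identifiers, so the pointer
   comparisons above succeed; naming both operands [res] would be
   caught here as a duplicate.  */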
2031 /* A subroutine of expand_asm_operands. Resolve the names of the operands
2032 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
2033 STRING and in the constraints to those numbers. */
2035 static tree
2036 resolve_operand_names (string, outputs, inputs, pconstraints)
2037 tree string;
2038 tree outputs, inputs;
2039 const char **pconstraints;
2041 char *buffer = xstrdup (TREE_STRING_POINTER (string));
2042 char *p;
2043 tree t;
2045 /* Assume that we will not need extra space to perform the substitution.
2046 This is because we get to remove '[' and ']', which means we cannot have
2047 a problem until we have more than 999 operands. */
2049 p = buffer;
2050 while ((p = strchr (p, '%')) != NULL)
2052 if (*++p != '[')
2053 continue;
2054 p = resolve_operand_name_1 (p, outputs, inputs);
2057 string = build_string (strlen (buffer), buffer);
2058 free (buffer);
2060 /* Collect output constraints here because it's convenient.
2061 There should be no named operands here; this is verified
2062 in expand_asm_operands. */
2063 for (t = outputs; t ; t = TREE_CHAIN (t), pconstraints++)
2064 *pconstraints = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2066 /* Substitute [<name>] in input constraint strings. */
2067 for (t = inputs; t ; t = TREE_CHAIN (t), pconstraints++)
2069 const char *c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2070 if (strchr (c, '[') == NULL)
2071 *pconstraints = c;
2072 else
2074 p = buffer = xstrdup (c);
2075 while ((p = strchr (p, '[')) != NULL)
2076 p = resolve_operand_name_1 (p, outputs, inputs);
2078 *pconstraints = ggc_alloc_string (buffer, -1);
2079 free (buffer);
2083 return string;
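/* As a sketch, the hypothetical template and operands

       "add %[res],%[val]"       [res] is output 0, [val] is input 1

   are rewritten in place to

       "add %0,%1"

   Replacing "[res]" with "0" always shortens the string while there
   are fewer than 1000 operands, which is why no extra buffer space
   is needed above.  */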
2086 /* A subroutine of resolve_operand_names. P points to the '[' for a
2087 potential named operand of the form [<name>]. In place, replace
2088 the name and brackets with a number. Return a pointer to the
2089 balance of the string after substitution. */
2091 static char *
2092 resolve_operand_name_1 (p, outputs, inputs)
2093 char *p;
2094 tree outputs, inputs;
2096 char *q;
2097 int op;
2098 tree t;
2099 size_t len;
2101 /* Collect the operand name. */
2102 q = strchr (p, ']');
2103 if (!q)
2105 error ("missing close brace for named operand");
2106 return strchr (p, '\0');
2108 len = q - p - 1;
2110 /* Resolve the name to a number. */
2111 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2113 const char *c = IDENTIFIER_POINTER (TREE_PURPOSE (TREE_PURPOSE (t)));
2114 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2115 goto found;
2117 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2119 const char *c = IDENTIFIER_POINTER (TREE_PURPOSE (TREE_PURPOSE (t)));
2120 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2121 goto found;
2124 *q = '\0';
2125 error ("undefined named operand '%s'", p + 1);
2126 op = 0;
2127 found:
2129 /* Replace the name with the number. Unfortunately, not all libraries
2130 get the return value of sprintf correct, so search for the end of the
2131 generated string by hand. */
2132 sprintf (p, "%d", op);
2133 p = strchr (p, '\0');
2135 /* Verify the no extra buffer space assumption. */
2136 if (p > q)
2137 abort ();
2139 /* Shift the rest of the buffer down to fill the gap. */
2140 memmove (p, q + 1, strlen (q + 1) + 1);
2142 return p;
2145 /* Generate RTL to evaluate the expression EXP
2146 and remember it in case this is the VALUE in a ({... VALUE; }) construct. */
2148 void
2149 expand_expr_stmt (exp)
2150 tree exp;
2152 /* If -W, warn about statements with no side effects,
2153 except for an explicit cast to void (e.g. for assert()), and
2154 except inside a ({...}) where they may be useful. */
2155 if (expr_stmts_for_value == 0 && exp != error_mark_node)
2157 if (! TREE_SIDE_EFFECTS (exp))
2159 if ((extra_warnings || warn_unused_value)
2160 && !(TREE_CODE (exp) == CONVERT_EXPR
2161 && VOID_TYPE_P (TREE_TYPE (exp))))
2162 warning_with_file_and_line (emit_filename, emit_lineno,
2163 "statement with no effect");
2165 else if (warn_unused_value)
2166 warn_if_unused_value (exp);
2169 /* If EXP is of function type and we are expanding statements for
2170 value, convert it to pointer-to-function. */
2171 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2172 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2174 /* The call to `expand_expr' could cause last_expr_type and
2175 last_expr_value to get reset. Therefore, we set last_expr_value
2176 and last_expr_type *after* calling expand_expr. */
2177 last_expr_value = expand_expr (exp,
2178 (expr_stmts_for_value
2179 ? NULL_RTX : const0_rtx),
2180 VOIDmode, 0);
2181 last_expr_type = TREE_TYPE (exp);
2183 /* If all we do is reference a volatile value in memory,
2184 copy it to a register to be sure it is actually touched. */
2185 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
2186 && TREE_THIS_VOLATILE (exp))
2188 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
2190 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
2191 copy_to_reg (last_expr_value);
2192 else
2194 rtx lab = gen_label_rtx ();
2196 /* Compare the value with itself to reference it. */
2197 emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
2198 expand_expr (TYPE_SIZE (last_expr_type),
2199 NULL_RTX, VOIDmode, 0),
2200 BLKmode, 0, lab);
2201 emit_label (lab);
2205 /* If this expression is part of a ({...}) and is in memory, we may have
2206 to preserve temporaries. */
2207 preserve_temp_slots (last_expr_value);
2209 /* Free any temporaries used to evaluate this expression. Any temporary
2210 used as a result of this expression will already have been preserved
2211 above. */
2212 free_temp_slots ();
2214 emit_queue ();
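/* For example, for a statement consisting only of `*p;' where P
   points to a volatile int, the MEM is copied to a pseudo so that
   the load is not deleted; for a volatile BLKmode reference, the
   dummy compare-and-jump above forces the reference instead.  */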
2217 /* Warn if EXP contains any computations whose results are not used.
2218 Return 1 if a warning is printed; 0 otherwise. */
2220 int
2221 warn_if_unused_value (exp)
2222 tree exp;
2224 if (TREE_USED (exp))
2225 return 0;
2227 /* Don't warn about void constructs. This includes casting to void,
2228 void function calls, and statement expressions with a final cast
2229 to void. */
2230 if (VOID_TYPE_P (TREE_TYPE (exp)))
2231 return 0;
2233 /* If this is an expression with side effects, don't warn. */
2234 if (TREE_SIDE_EFFECTS (exp))
2235 return 0;
2237 switch (TREE_CODE (exp))
2239 case PREINCREMENT_EXPR:
2240 case POSTINCREMENT_EXPR:
2241 case PREDECREMENT_EXPR:
2242 case POSTDECREMENT_EXPR:
2243 case MODIFY_EXPR:
2244 case INIT_EXPR:
2245 case TARGET_EXPR:
2246 case CALL_EXPR:
2247 case METHOD_CALL_EXPR:
2248 case RTL_EXPR:
2249 case TRY_CATCH_EXPR:
2250 case WITH_CLEANUP_EXPR:
2251 case EXIT_EXPR:
2252 return 0;
2254 case BIND_EXPR:
2255 /* For a binding, warn if no side effect within it. */
2256 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2258 case SAVE_EXPR:
2259 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2261 case TRUTH_ORIF_EXPR:
2262 case TRUTH_ANDIF_EXPR:
2263 /* In && or ||, warn if 2nd operand has no side effect. */
2264 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2266 case COMPOUND_EXPR:
2267 if (TREE_NO_UNUSED_WARNING (exp))
2268 return 0;
2269 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2270 return 1;
2271 /* Let people do `(foo (), 0)' without a warning. */
2272 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2273 return 0;
2274 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2276 case NOP_EXPR:
2277 case CONVERT_EXPR:
2278 case NON_LVALUE_EXPR:
2279 /* Don't warn about conversions not explicit in the user's program. */
2280 if (TREE_NO_UNUSED_WARNING (exp))
2281 return 0;
2282 /* Assignment to a cast usually results in a cast of a modify.
2283 Don't complain about that. There can be an arbitrary number of
2284 casts before the modify, so we must loop until we find the first
2285 non-cast expression and then test to see if that is a modify. */
2287 tree tem = TREE_OPERAND (exp, 0);
2289 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2290 tem = TREE_OPERAND (tem, 0);
2292 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2293 || TREE_CODE (tem) == CALL_EXPR)
2294 return 0;
2296 goto warn;
2298 case INDIRECT_REF:
2299 /* Don't warn about automatic dereferencing of references, since
2300 the user cannot control it. */
2301 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2302 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2303 /* Fall through. */
2305 default:
2306 /* Referencing a volatile value is a side effect, so don't warn. */
2307 if ((DECL_P (exp)
2308 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2309 && TREE_THIS_VOLATILE (exp))
2310 return 0;
2312 /* If this is an expression which has no operands, there is no value
2313 to be unused. There are no such language-independent codes,
2314 but front ends may define such. */
2315 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2316 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2317 return 0;
2319 warn:
2320 warning_with_file_and_line (emit_filename, emit_lineno,
2321 "value computed is not used");
2322 return 1;
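/* A few examples of the distinctions drawn above:

       x + 1;         warns: value computed is not used
       (void) f ();   no warning: explicit cast to void
       (f (), 0);     no warning: constant second operand
       (f (), x);     warns for `x'
   */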
2326 /* Clear out the memory of the last expression evaluated. */
2328 void
2329 clear_last_expr ()
2331 last_expr_type = 0;
2334 /* Begin a statement which will return a value.
2335 Return the RTL_EXPR for this statement expr.
2336 The caller must save that value and pass it to expand_end_stmt_expr. */
2338 tree
2339 expand_start_stmt_expr ()
2341 tree t;
2343 /* Make the RTL_EXPR node temporary, not momentary,
2344 so that rtl_expr_chain doesn't become garbage. */
2345 t = make_node (RTL_EXPR);
2346 do_pending_stack_adjust ();
2347 start_sequence_for_rtl_expr (t);
2348 NO_DEFER_POP;
2349 expr_stmts_for_value++;
2350 return t;
2353 /* Restore the previous state at the end of a statement that returns a value.
2354 Returns a tree node representing the statement's value and the
2355 insns to compute the value.
2357 The nodes of that expression have been freed by now, so we cannot use them.
2358 But we don't want to do that anyway; the expression has already been
2359 evaluated and now we just want to use the value. So generate a RTL_EXPR
2360 with the proper type and RTL value.
2362 If the last substatement was not an expression,
2363 return something with type `void'. */
2365 tree
2366 expand_end_stmt_expr (t)
2367 tree t;
2369 OK_DEFER_POP;
2371 if (last_expr_type == 0)
2373 last_expr_type = void_type_node;
2374 last_expr_value = const0_rtx;
2376 else if (last_expr_value == 0)
2377 /* There are some cases where this can happen, such as when the
2378 statement is void type. */
2379 last_expr_value = const0_rtx;
2380 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2381 /* Remove any possible QUEUED. */
2382 last_expr_value = protect_from_queue (last_expr_value, 0);
2384 emit_queue ();
2386 TREE_TYPE (t) = last_expr_type;
2387 RTL_EXPR_RTL (t) = last_expr_value;
2388 RTL_EXPR_SEQUENCE (t) = get_insns ();
2390 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2392 end_sequence ();
2394 /* Don't consider deleting this expr or containing exprs at tree level. */
2395 TREE_SIDE_EFFECTS (t) = 1;
2396 /* Propagate volatility of the actual RTL expr. */
2397 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2399 last_expr_type = 0;
2400 expr_stmts_for_value--;
2402 return t;
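/* A sketch of the expected calling sequence for a statement
   expression ({ stmt; ... value; }):

       tree t = expand_start_stmt_expr ();
       ... expand the component statements ...
       t = expand_end_stmt_expr (t);

   after which T has the type and RTL value of the last expression
   statement, or type `void' if there was none.  */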
2405 /* Generate RTL for the start of an if-then. COND is the expression
2406 whose truth should be tested.
2408 If EXITFLAG is nonzero, this conditional is visible to
2409 `exit_something'. */
2411 void
2412 expand_start_cond (cond, exitflag)
2413 tree cond;
2414 int exitflag;
2416 struct nesting *thiscond = ALLOC_NESTING ();
2418 /* Make an entry on cond_stack for the cond we are entering. */
2420 thiscond->next = cond_stack;
2421 thiscond->all = nesting_stack;
2422 thiscond->depth = ++nesting_depth;
2423 thiscond->data.cond.next_label = gen_label_rtx ();
2424 /* Before we encounter an `else', we don't need a separate exit label
2425 unless there are supposed to be exit statements
2426 to exit this conditional. */
2427 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2428 thiscond->data.cond.endif_label = thiscond->exit_label;
2429 cond_stack = thiscond;
2430 nesting_stack = thiscond;
2432 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2435 /* Generate RTL between then-clause and the elseif-clause
2436 of an if-then-elseif-.... */
2438 void
2439 expand_start_elseif (cond)
2440 tree cond;
2442 if (cond_stack->data.cond.endif_label == 0)
2443 cond_stack->data.cond.endif_label = gen_label_rtx ();
2444 emit_jump (cond_stack->data.cond.endif_label);
2445 emit_label (cond_stack->data.cond.next_label);
2446 cond_stack->data.cond.next_label = gen_label_rtx ();
2447 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2450 /* Generate RTL between the then-clause and the else-clause
2451 of an if-then-else. */
2453 void
2454 expand_start_else ()
2456 if (cond_stack->data.cond.endif_label == 0)
2457 cond_stack->data.cond.endif_label = gen_label_rtx ();
2459 emit_jump (cond_stack->data.cond.endif_label);
2460 emit_label (cond_stack->data.cond.next_label);
2461 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2464 /* After calling expand_start_else, turn this "else" into an "else if"
2465 by providing another condition. */
2467 void
2468 expand_elseif (cond)
2469 tree cond;
2471 cond_stack->data.cond.next_label = gen_label_rtx ();
2472 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2475 /* Generate RTL for the end of an if-then.
2476 Pop the record for it off of cond_stack. */
2478 void
2479 expand_end_cond ()
2481 struct nesting *thiscond = cond_stack;
2483 do_pending_stack_adjust ();
2484 if (thiscond->data.cond.next_label)
2485 emit_label (thiscond->data.cond.next_label);
2486 if (thiscond->data.cond.endif_label)
2487 emit_label (thiscond->data.cond.endif_label);
2489 POPSTACK (cond_stack);
2490 last_expr_type = 0;
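/* Taken together, a front end would expand
   `if (a) s1; else if (b) s2; else s3;' with a sequence along these
   lines (a sketch):

       expand_start_cond (a, 0);
       ... expand s1 ...
       expand_start_elseif (b);
       ... expand s2 ...
       expand_start_else ();
       ... expand s3 ...
       expand_end_cond ();
   */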
2493 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2494 loop should be exited by `exit_something'. This is a loop for which
2495 `expand_continue' will jump to the top of the loop.
2497 Make an entry on loop_stack to record the labels associated with
2498 this loop. */
2500 struct nesting *
2501 expand_start_loop (exit_flag)
2502 int exit_flag;
2504 struct nesting *thisloop = ALLOC_NESTING ();
2506 /* Make an entry on loop_stack for the loop we are entering. */
2508 thisloop->next = loop_stack;
2509 thisloop->all = nesting_stack;
2510 thisloop->depth = ++nesting_depth;
2511 thisloop->data.loop.start_label = gen_label_rtx ();
2512 thisloop->data.loop.end_label = gen_label_rtx ();
2513 thisloop->data.loop.alt_end_label = 0;
2514 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2515 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2516 loop_stack = thisloop;
2517 nesting_stack = thisloop;
2519 do_pending_stack_adjust ();
2520 emit_queue ();
2521 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2522 emit_label (thisloop->data.loop.start_label);
2524 return thisloop;
2527 /* Like expand_start_loop but for a loop where the continuation point
2528 (for expand_continue_loop) will be specified explicitly. */
2530 struct nesting *
2531 expand_start_loop_continue_elsewhere (exit_flag)
2532 int exit_flag;
2534 struct nesting *thisloop = expand_start_loop (exit_flag);
2535 loop_stack->data.loop.continue_label = gen_label_rtx ();
2536 return thisloop;
2539 /* Begin a null, aka do { } while (0) "loop". But since the contents
2540 of said loop can still contain a break, we must frob the loop nest. */
2542 struct nesting *
2543 expand_start_null_loop ()
2545 struct nesting *thisloop = ALLOC_NESTING ();
2547 /* Make an entry on loop_stack for the loop we are entering. */
2549 thisloop->next = loop_stack;
2550 thisloop->all = nesting_stack;
2551 thisloop->depth = ++nesting_depth;
2552 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2553 thisloop->data.loop.end_label = gen_label_rtx ();
2554 thisloop->data.loop.alt_end_label = NULL_RTX;
2555 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2556 thisloop->exit_label = thisloop->data.loop.end_label;
2557 loop_stack = thisloop;
2558 nesting_stack = thisloop;
2560 return thisloop;
2563 /* Specify the continuation point for a loop started with
2564 expand_start_loop_continue_elsewhere.
2565 Use this at the point in the code to which a continue statement
2566 should jump. */
2568 void
2569 expand_loop_continue_here ()
2571 do_pending_stack_adjust ();
2572 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2573 emit_label (loop_stack->data.loop.continue_label);
2576 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2577 Pop the block off of loop_stack. */
2579 void
2580 expand_end_loop ()
2582 rtx start_label = loop_stack->data.loop.start_label;
2583 rtx insn = get_last_insn ();
2584 int needs_end_jump = 1;
2586 /* Mark the continue-point at the top of the loop if none elsewhere. */
2587 if (start_label == loop_stack->data.loop.continue_label)
2588 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2590 do_pending_stack_adjust ();
2592 /* If optimizing, perhaps reorder the loop.
2593 First, try to use a condjump near the end.
2594 expand_exit_loop_if_false ends loops with unconditional jumps,
2595 like this:
2597 if (test) goto label;
2598 optional: cleanup
2599 goto loop_stack->data.loop.end_label
2600 barrier
2601 label:
2603 If we find such a pattern, we can end the loop earlier. */
2605 if (optimize
2606 && GET_CODE (insn) == CODE_LABEL
2607 && LABEL_NAME (insn) == NULL
2608 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2610 rtx label = insn;
2611 rtx jump = PREV_INSN (PREV_INSN (label));
2613 if (GET_CODE (jump) == JUMP_INSN
2614 && GET_CODE (PATTERN (jump)) == SET
2615 && SET_DEST (PATTERN (jump)) == pc_rtx
2616 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2617 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2618 == loop_stack->data.loop.end_label))
2620 rtx prev;
2622 /* The test might be complex and reference LABEL multiple times,
2623 like the loop in loop_iterations to set vtop. To handle this,
2624 we move LABEL. */
2625 insn = PREV_INSN (label);
2626 reorder_insns (label, label, start_label);
2628 for (prev = PREV_INSN (jump);; prev = PREV_INSN (prev))
2630 /* We ignore line number notes, but if we see any other note,
2631 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2632 NOTE_INSN_LOOP_*, we disable this optimization. */
2633 if (GET_CODE (prev) == NOTE)
2635 if (NOTE_LINE_NUMBER (prev) < 0)
2636 break;
2637 continue;
2639 if (GET_CODE (prev) == CODE_LABEL)
2640 break;
2641 if (GET_CODE (prev) == JUMP_INSN)
2643 if (GET_CODE (PATTERN (prev)) == SET
2644 && SET_DEST (PATTERN (prev)) == pc_rtx
2645 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2646 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2647 == LABEL_REF)
2648 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2650 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2651 = start_label;
2652 emit_note_after (NOTE_INSN_LOOP_END, prev);
2653 needs_end_jump = 0;
2655 break;
2661 /* If the loop starts with a loop exit, roll that to the end where
2662 it will optimize together with the jump back.
2664 We look for the conditional branch to the exit, except that once
2665 we find such a branch, we don't look past 30 instructions.
2667 In more detail, if the loop presently looks like this (in pseudo-C):
2669 start_label:
2670 if (test) goto end_label;
2671 body;
2672 goto start_label;
2673 end_label:
2675 transform it to look like:
2677 goto start_label;
2678 newstart_label:
2679 body;
2680 start_label:
2681 if (test) goto end_label;
2682 goto newstart_label;
2683 end_label:
2685 Here, the `test' may actually consist of some reasonably complex
2686 code, terminating in a test. */
2688 if (optimize
2689 && needs_end_jump
2691 ! (GET_CODE (insn) == JUMP_INSN
2692 && GET_CODE (PATTERN (insn)) == SET
2693 && SET_DEST (PATTERN (insn)) == pc_rtx
2694 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2696 int eh_regions = 0;
2697 int num_insns = 0;
2698 rtx last_test_insn = NULL_RTX;
2700 /* Scan insns from the top of the loop looking for a qualified
2701 conditional exit. */
2702 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2703 insn = NEXT_INSN (insn))
2705 if (GET_CODE (insn) == NOTE)
2707 if (optimize < 2
2708 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2709 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2710 /* The code that actually moves the exit test will
2711 carefully leave BLOCK notes in their original
2712 location. That means, however, that we can't debug
2713 the exit test itself. So, we refuse to move code
2714 containing BLOCK notes at low optimization levels. */
2715 break;
2717 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2718 ++eh_regions;
2719 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2721 --eh_regions;
2722 if (eh_regions < 0)
2723 /* We've come to the end of an EH region, but
2724 never saw the beginning of that region. That
2725 means that an EH region begins before the top
2726 of the loop, and ends in the middle of it. The
2727 existence of such a situation violates a basic
2728 assumption in this code, since that would imply
2729 that even when EH_REGIONS is zero, we might
2730 move code out of an exception region. */
2731 abort ();
2734 /* We must not walk into a nested loop. */
2735 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2736 break;
2738 /* We already know this INSN is a NOTE, so there's no
2739 point in looking at it to see if it's a JUMP. */
2740 continue;
2743 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2744 num_insns++;
2746 if (last_test_insn && num_insns > 30)
2747 break;
2749 if (eh_regions > 0)
2750 /* We don't want to move a partial EH region. Consider:
2752 while ( ( { try {
2753 if (cond ()) 0;
2754 else {
2755 bar();
2756 }
2758 } catch (...) {
2760 } } )) {
2761 body;
2762 }
2764 This isn't legal C++, but here's what it's supposed to
2765 mean: if cond() is true, stop looping. Otherwise,
2766 call bar, and keep looping. In addition, if cond
2767 throws an exception, catch it and keep looping. Such
2768 constructs are certainly legal in LISP.
2770 We should not move the `if (cond()) 0' test since then
2771 the EH-region for the try-block would be broken up.
2772 (In this case we would move the EH_BEG note for the `try'
2773 and `if cond()' but not the call to bar() or the
2774 EH_END note.)
2776 So we don't look for tests within an EH region. */
2777 continue;
2779 if (GET_CODE (insn) == JUMP_INSN
2780 && GET_CODE (PATTERN (insn)) == SET
2781 && SET_DEST (PATTERN (insn)) == pc_rtx)
2783 /* This is indeed a jump. */
2784 rtx dest1 = NULL_RTX;
2785 rtx dest2 = NULL_RTX;
2786 rtx potential_last_test;
2787 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2789 /* A conditional jump. */
2790 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2791 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2792 potential_last_test = insn;
2794 else
2796 /* An unconditional jump. */
2797 dest1 = SET_SRC (PATTERN (insn));
2798 /* Include the BARRIER after the JUMP. */
2799 potential_last_test = NEXT_INSN (insn);
2802 do {
2803 if (dest1 && GET_CODE (dest1) == LABEL_REF
2804 && ((XEXP (dest1, 0)
2805 == loop_stack->data.loop.alt_end_label)
2806 || (XEXP (dest1, 0)
2807 == loop_stack->data.loop.end_label)))
2809 last_test_insn = potential_last_test;
2810 break;
2813 /* If this was a conditional jump, there may be
2814 another label at which we should look. */
2815 dest1 = dest2;
2816 dest2 = NULL_RTX;
2817 } while (dest1);
2821 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2823 /* We found one. Move everything from there up
2824 to the end of the loop, and add a jump into the loop
2825 to jump to there. */
2826 rtx newstart_label = gen_label_rtx ();
2827 rtx start_move = start_label;
2828 rtx next_insn;
2830 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2831 then we want to move this note also. */
2832 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2833 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2834 == NOTE_INSN_LOOP_CONT))
2835 start_move = PREV_INSN (start_move);
2837 emit_label_after (newstart_label, PREV_INSN (start_move));
2839 /* Actually move the insns. Start at the beginning, and
2840 keep copying insns until we've copied the
2841 last_test_insn. */
2842 for (insn = start_move; insn; insn = next_insn)
2844 /* Figure out which insn comes after this one. We have
2845 to do this before we move INSN. */
2846 if (insn == last_test_insn)
2847 /* We've moved all the insns. */
2848 next_insn = NULL_RTX;
2849 else
2850 next_insn = NEXT_INSN (insn);
2852 if (GET_CODE (insn) == NOTE
2853 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2854 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2855 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2856 NOTE_INSN_BLOCK_ENDs because the correct generation
2857 of debugging information depends on these appearing
2858 in the same order in the RTL and in the tree
2859 structure, where they are represented as BLOCKs.
2860 So, we don't move block notes. Of course, moving
2861 the code inside the block is likely to make it
2862 impossible to debug the instructions in the exit
2863 test, but such is the price of optimization. */
2864 continue;
2866 /* Move the INSN. */
2867 reorder_insns (insn, insn, get_last_insn ());
2870 emit_jump_insn_after (gen_jump (start_label),
2871 PREV_INSN (newstart_label));
2872 emit_barrier_after (PREV_INSN (newstart_label));
2873 start_label = newstart_label;
2877 if (needs_end_jump)
2879 emit_jump (start_label);
2880 emit_note (NULL, NOTE_INSN_LOOP_END);
2882 emit_label (loop_stack->data.loop.end_label);
2884 POPSTACK (loop_stack);
2886 last_expr_type = 0;
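/* As a sketch, a front end might expand `while (test) body' as:

       expand_start_loop (1);
       expand_exit_loop_if_false (0, test);
       ... expand body ...
       expand_end_loop ();

   and the exit-test rotation above then produces the jump-to-bottom
   form described in the comment, so the running loop executes a
   single conditional branch per iteration.  */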
2889 /* Finish a null loop, aka do { } while (0). */
2891 void
2892 expand_end_null_loop ()
2894 do_pending_stack_adjust ();
2895 emit_label (loop_stack->data.loop.end_label);
2897 POPSTACK (loop_stack);
2899 last_expr_type = 0;
2902 /* Generate a jump to the current loop's continue-point.
2903 This is usually the top of the loop, but may be specified
2904 explicitly elsewhere. If not currently inside a loop,
2905 return 0 and do nothing; caller will print an error message. */
2907 int
2908 expand_continue_loop (whichloop)
2909 struct nesting *whichloop;
2911 last_expr_type = 0;
2912 if (whichloop == 0)
2913 whichloop = loop_stack;
2914 if (whichloop == 0)
2915 return 0;
2916 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2917 NULL_RTX);
2918 return 1;
2921 /* Generate a jump to exit the current loop. If not currently inside a loop,
2922 return 0 and do nothing; caller will print an error message. */
2924 int
2925 expand_exit_loop (whichloop)
2926 struct nesting *whichloop;
2928 last_expr_type = 0;
2929 if (whichloop == 0)
2930 whichloop = loop_stack;
2931 if (whichloop == 0)
2932 return 0;
2933 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2934 return 1;
2937 /* Generate a conditional jump to exit the current loop if COND
2938 evaluates to zero. If not currently inside a loop,
2939 return 0 and do nothing; caller will print an error message. */
2941 int
2942 expand_exit_loop_if_false (whichloop, cond)
2943 struct nesting *whichloop;
2944 tree cond;
2946 rtx label = gen_label_rtx ();
2947 rtx last_insn;
2948 last_expr_type = 0;
2950 if (whichloop == 0)
2951 whichloop = loop_stack;
2952 if (whichloop == 0)
2953 return 0;
2954 /* In order to handle fixups, we actually create a conditional jump
2955 around an unconditional branch to exit the loop. If fixups are
2956 necessary, they go before the unconditional branch. */
2958 do_jump (cond, NULL_RTX, label);
2959 last_insn = get_last_insn ();
2960 if (GET_CODE (last_insn) == CODE_LABEL)
2961 whichloop->data.loop.alt_end_label = last_insn;
2962 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2963 NULL_RTX);
2964 emit_label (label);
2966 return 1;
2969 /* Return nonzero if the loop nest is empty. Else return zero. */
2971 int
2972 stmt_loop_nest_empty ()
2974 /* cfun->stmt can be NULL if we are building a call to get the
2975 EH context for a setjmp/longjmp EH target and the current
2976 function was a deferred inline function. */
2977 return (cfun->stmt == NULL || loop_stack == NULL);
2980 /* Return non-zero if we should preserve sub-expressions as separate
2981 pseudos. We never do so if we aren't optimizing. We always do so
2982 if -fexpensive-optimizations.
2984 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2985 the loop may still be a small one. */
2987 int
2988 preserve_subexpressions_p ()
2990 rtx insn;
2992 if (flag_expensive_optimizations)
2993 return 1;
2995 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2996 return 0;
2998 insn = get_last_insn_anywhere ();
3000 return (insn
3001 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
3002 < n_non_fixed_regs * 3));
3006 /* Generate a jump to exit the current loop, conditional, binding contour
3007 or case statement. Not all such constructs are visible to this function,
3008 only those started with EXIT_FLAG nonzero. Individual languages use
3009 the EXIT_FLAG parameter to control which kinds of constructs you can
3010 exit this way.
3012 If not currently inside anything that can be exited,
3013 return 0 and do nothing; caller will print an error message. */
3015 int
3016 expand_exit_something ()
3018 struct nesting *n;
3019 last_expr_type = 0;
3020 for (n = nesting_stack; n; n = n->all)
3021 if (n->exit_label != 0)
3023 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
3024 return 1;
3027 return 0;
3030 /* Generate RTL to return from the current function, with no value.
3031 (That is, we do not do anything about returning any value.) */
3033 void
3034 expand_null_return ()
3036 rtx last_insn = get_last_insn ();
3038 /* If this function was declared to return a value, but we
3039 didn't, clobber the return registers so that they are not
3040 propagated live to the rest of the function. */
3041 clobber_return_register ();
3043 expand_null_return_1 (last_insn);
3046 /* Generate RTL to return from the current function, with value VAL. */
3048 static void
3049 expand_value_return (val)
3050 rtx val;
3052 rtx last_insn = get_last_insn ();
3053 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
3055 /* Copy the value to the return location
3056 unless it's already there. */
3058 if (return_reg != val)
3060 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
3061 #ifdef PROMOTE_FUNCTION_RETURN
3062 int unsignedp = TREE_UNSIGNED (type);
3063 enum machine_mode old_mode
3064 = DECL_MODE (DECL_RESULT (current_function_decl));
3065 enum machine_mode mode
3066 = promote_mode (type, old_mode, &unsignedp, 1);
3068 if (mode != old_mode)
3069 val = convert_modes (mode, old_mode, val, unsignedp);
3070 #endif
3071 if (GET_CODE (return_reg) == PARALLEL)
3072 emit_group_load (return_reg, val, int_size_in_bytes (type));
3073 else
3074 emit_move_insn (return_reg, val);
3077 expand_null_return_1 (last_insn);
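/* For instance, on a target defining PROMOTE_FUNCTION_RETURN, a
   function returning `short' might have its DECL_RESULT in HImode
   while the ABI returns values in SImode; the promote_mode and
   convert_modes pair above widens the value before the final
   move.  */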
3080 /* Output a return with no value. If LAST_INSN is nonzero,
3081 pretend that the return takes place after LAST_INSN. */
3083 static void
3084 expand_null_return_1 (last_insn)
3085 rtx last_insn;
3087 rtx end_label = cleanup_label ? cleanup_label : return_label;
3089 clear_pending_stack_adjust ();
3090 do_pending_stack_adjust ();
3091 last_expr_type = 0;
3093 if (end_label == 0)
3094 end_label = return_label = gen_label_rtx ();
3095 expand_goto_internal (NULL_TREE, end_label, last_insn);
3098 /* Generate RTL to evaluate the expression RETVAL and return it
3099 from the current function. */
3101 void
3102 expand_return (retval)
3103 tree retval;
3105 /* If there are any cleanups to be performed, then they will
3106 be inserted following LAST_INSN. It is desirable
3107 that the last_insn, for such purposes, should be the
3108 last insn before computing the return value. Otherwise, cleanups
3109 which call functions can clobber the return value. */
3110 /* ??? rms: I think that is erroneous, because in C++ it would
3111 run destructors on variables that might be used in the subsequent
3112 computation of the return value. */
3113 rtx last_insn = 0;
3114 rtx result_rtl;
3115 rtx val = 0;
3116 tree retval_rhs;
3118 /* If function wants no value, give it none. */
3119 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3121 expand_expr (retval, NULL_RTX, VOIDmode, 0);
3122 emit_queue ();
3123 expand_null_return ();
3124 return;
3127 if (retval == error_mark_node)
3129 /* Treat this like a return of no value from a function that
3130 returns a value. */
3131 expand_null_return ();
3132 return;
3134 else if (TREE_CODE (retval) == RESULT_DECL)
3135 retval_rhs = retval;
3136 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3137 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3138 retval_rhs = TREE_OPERAND (retval, 1);
3139 else if (VOID_TYPE_P (TREE_TYPE (retval)))
3140 /* Recognize tail-recursive call to void function. */
3141 retval_rhs = retval;
3142 else
3143 retval_rhs = NULL_TREE;
3145 last_insn = get_last_insn ();
3147 /* Distribute return down conditional expr if either of the sides
3148 may involve tail recursion (see test below). This enhances the number
3149 of tail recursions we see. Don't do this always since it can produce
3150 sub-optimal code in some cases and we distribute assignments into
3151 conditional expressions when it would help. */
3153 if (optimize && retval_rhs != 0
3154 && frame_offset == 0
3155 && TREE_CODE (retval_rhs) == COND_EXPR
3156 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3157 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3159 rtx label = gen_label_rtx ();
3160 tree expr;
3162 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3163 start_cleanup_deferral ();
3164 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3165 DECL_RESULT (current_function_decl),
3166 TREE_OPERAND (retval_rhs, 1));
3167 TREE_SIDE_EFFECTS (expr) = 1;
3168 expand_return (expr);
3169 emit_label (label);
3171 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3172 DECL_RESULT (current_function_decl),
3173 TREE_OPERAND (retval_rhs, 2));
3174 TREE_SIDE_EFFECTS (expr) = 1;
3175 expand_return (expr);
3176 end_cleanup_deferral ();
3177 return;
3180 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3182 /* If the result is an aggregate that is being returned in one (or more)
3183 registers, load the registers here. The compiler currently can't handle
3184 copying a BLKmode value into registers. We could put this code in a
3185 more general area (for use by everyone instead of just function
3186 call/return), but until this feature is generally usable it is kept here
3187 (and in expand_call). The value must go into a pseudo in case there
3188 are cleanups that will clobber the real return register. */
3190 if (retval_rhs != 0
3191 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3192 && GET_CODE (result_rtl) == REG)
3194 int i;
3195 unsigned HOST_WIDE_INT bitpos, xbitpos;
3196 unsigned HOST_WIDE_INT big_endian_correction = 0;
3197 unsigned HOST_WIDE_INT bytes
3198 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3199 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3200 unsigned int bitsize
3201 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3202 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3203 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3204 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3205 enum machine_mode tmpmode, result_reg_mode;
3207 if (bytes == 0)
3209 expand_null_return ();
3210 return;
3213 /* Structures whose size is not a multiple of a word are aligned
3214 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3215 machine, this means we must skip the empty high order bytes when
3216 calculating the bit offset. */
3217 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
3218 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3219 * BITS_PER_UNIT));
3221 /* Copy the structure BITSIZE bits at a time. */
3222 for (bitpos = 0, xbitpos = big_endian_correction;
3223 bitpos < bytes * BITS_PER_UNIT;
3224 bitpos += bitsize, xbitpos += bitsize)
3226 /* We need a new destination pseudo each time xbitpos is
3227 on a word boundary and when xbitpos == big_endian_correction
3228 (the first time through). */
3229 if (xbitpos % BITS_PER_WORD == 0
3230 || xbitpos == big_endian_correction)
3232 /* Generate an appropriate register. */
3233 dst = gen_reg_rtx (word_mode);
3234 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3236 /* Clobber the destination before we move anything into it. */
3237 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
3240 /* We need a new source operand each time bitpos is on a word
3241 boundary. */
3242 if (bitpos % BITS_PER_WORD == 0)
3243 src = operand_subword_force (result_val,
3244 bitpos / BITS_PER_WORD,
3245 BLKmode);
3247 /* Use bitpos for the source extraction (left justified) and
3248 xbitpos for the destination store (right justified). */
3249 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3250 extract_bit_field (src, bitsize,
3251 bitpos % BITS_PER_WORD, 1,
3252 NULL_RTX, word_mode, word_mode,
3253 BITS_PER_WORD),
3254 BITS_PER_WORD);
3257 /* Find the smallest integer mode large enough to hold the
3258 entire structure and use that mode instead of BLKmode
3259 on the USE insn for the return register. */
3260 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3261 tmpmode != VOIDmode;
3262 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3263 /* Have we found a large enough mode? */
3264 if (GET_MODE_SIZE (tmpmode) >= bytes)
3265 break;
3267 /* No suitable mode found. */
3268 if (tmpmode == VOIDmode)
3269 abort ();
3271 PUT_MODE (result_rtl, tmpmode);
3273 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3274 result_reg_mode = word_mode;
3275 else
3276 result_reg_mode = tmpmode;
3277 result_reg = gen_reg_rtx (result_reg_mode);
3279 emit_queue ();
3280 for (i = 0; i < n_regs; i++)
3281 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3282 result_pseudos[i]);
3284 if (tmpmode != result_reg_mode)
3285 result_reg = gen_lowpart (tmpmode, result_reg);
3287 expand_value_return (result_reg);
3289 else if (retval_rhs != 0
3290 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3291 && (GET_CODE (result_rtl) == REG
3292 || (GET_CODE (result_rtl) == PARALLEL)))
3294 /* Calculate the return value into a temporary (usually a pseudo
3295 reg). */
3296 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3297 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3299 val = assign_temp (nt, 0, 0, 1);
3300 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3301 val = force_not_mem (val);
3302 emit_queue ();
3303 /* Return the calculated value, doing cleanups first. */
3304 expand_value_return (val);
3306 else
3308 /* No cleanups or no hard reg used;
3309 calculate value into hard return reg. */
3310 expand_expr (retval, const0_rtx, VOIDmode, 0);
3311 emit_queue ();
3312 expand_value_return (result_rtl);
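/* A worked instance of the big-endian correction above: for a
   6-byte BLKmode value with UNITS_PER_WORD == 4 (so BITS_PER_WORD
   == 32),

       big_endian_correction = 32 - (6 % 4) * 8 = 16

   so copying starts at bit 16 of the first destination word, and
   the first two bytes land right-justified in it, skipping the two
   empty high-order bytes that BYTES_BIG_ENDIAN padding puts on the
   left.  */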
3316 /* Return 1 if the end of the generated RTX is not a barrier.
3317 This means code already compiled can drop through. */
3319 int
3320 drop_through_at_end_p ()
3322 rtx insn = get_last_insn ();
3323 while (insn && GET_CODE (insn) == NOTE)
3324 insn = PREV_INSN (insn);
3325 return insn && GET_CODE (insn) != BARRIER;
3328 /* Attempt to optimize a potential tail recursion call into a goto.
3329 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3330 where to place the jump to the tail recursion label.
3332 Return TRUE if the call was optimized into a goto. */
3334 int
3335 optimize_tail_recursion (arguments, last_insn)
3336 tree arguments;
3337 rtx last_insn;
3339 /* Finish checking validity, and if valid emit code to set the
3340 argument variables for the new call. */
3341 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3343 if (tail_recursion_label == 0)
3345 tail_recursion_label = gen_label_rtx ();
3346 emit_label_after (tail_recursion_label,
3347 tail_recursion_reentry);
3349 emit_queue ();
3350 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3351 emit_barrier ();
3352 return 1;
3354 return 0;
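/* For illustration, in a function like

       int sum (int n, int acc) { return n ? sum (n - 1, acc + n) : acc; }

   the self-call in return position can qualify: tail_recursion_args
   emits moves that update the formals N and ACC from the actual
   argument values, and the call is replaced by a jump back to
   tail_recursion_label, turning the recursion into a loop.  */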
3357 /* Emit code to alter this function's formal parms for a tail-recursive call.
3358 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3359 FORMALS is the chain of decls of formals.
3360 Return 1 if this can be done;
3361 otherwise return 0 and do not emit any code. */
3363 static int
3364 tail_recursion_args (actuals, formals)
3365 tree actuals, formals;
3367 tree a = actuals, f = formals;
3368 int i;
3369 rtx *argvec;
3371 /* Check that number and types of actuals are compatible
3372 with the formals. This is not always true in valid C code.
3373 Also check that no formal needs to be addressable
3374 and that all formals are scalars. */
3376 /* Also count the args. */
3378 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3380 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3381 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3382 return 0;
3383 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3384 return 0;
3386 if (a != 0 || f != 0)
3387 return 0;
3389 /* Compute all the actuals. */
3391 argvec = (rtx *) alloca (i * sizeof (rtx));
3393 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3394 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3396 /* Find which actual values refer to current values of previous formals.
3397 Copy each of them now, before any formal is changed. */
3399 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3401 int copy = 0;
3402 int j;
3403 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3404 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3406 copy = 1;
3407 break;
3409 if (copy)
3410 argvec[i] = copy_to_reg (argvec[i]);
3413 /* Store the values of the actuals into the formals. */
3415 for (f = formals, a = actuals, i = 0; f;
3416 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3418 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3419 emit_move_insn (DECL_RTL (f), argvec[i]);
3420 else
3421 convert_move (DECL_RTL (f), argvec[i],
3422 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3425 free_temp_slots ();
3426 return 1;
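/* The copying pass above matters for a call such as

       f (b, a)     appearing inside     f (a, b)

   where the second actual still refers to DECL_RTL of the formal A
   when computed; it must be copied to a fresh register before the
   store `A = value of B' clobbers it, or the subsequent store into
   B would read the wrong value.  */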
3429 /* Generate the RTL code for entering a binding contour.
3430 The variables are declared one by one, by calls to `expand_decl'.
3432 FLAGS is a bitwise or of the following flags:
3434 1 - Nonzero if this construct should be visible to
3435 `exit_something'.
3437 2 - Nonzero if this contour does not require a
3438 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3439 language-independent code should set this flag because they
3440 will not create corresponding BLOCK nodes. (There should be
3441 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3442 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3443 when expand_end_bindings is called.
3445 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3446 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3447 note. */
3449 void
3450 expand_start_bindings_and_block (flags, block)
3451 int flags;
3452 tree block;
3454 struct nesting *thisblock = ALLOC_NESTING ();
3455 rtx note;
3456 int exit_flag = ((flags & 1) != 0);
3457 int block_flag = ((flags & 2) == 0);
3459 /* If a BLOCK is supplied, then the caller should be requesting a
3460 NOTE_INSN_BLOCK_BEG note. */
3461 if (!block_flag && block)
3462 abort ();
3464 /* Create a note to mark the beginning of the block. */
3465 if (block_flag)
3467 note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
3468 NOTE_BLOCK (note) = block;
3470 else
3471 note = emit_note (NULL, NOTE_INSN_DELETED);
3473 /* Make an entry on block_stack for the block we are entering. */
3475 thisblock->next = block_stack;
3476 thisblock->all = nesting_stack;
3477 thisblock->depth = ++nesting_depth;
3478 thisblock->data.block.stack_level = 0;
3479 thisblock->data.block.cleanups = 0;
3480 thisblock->data.block.n_function_calls = 0;
3481 thisblock->data.block.exception_region = 0;
3482 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3484 thisblock->data.block.conditional_code = 0;
3485 thisblock->data.block.last_unconditional_cleanup = note;
3486 /* When we insert instructions after the last unconditional cleanup,
3487 we don't adjust last_insn. That means that a later add_insn will
3488 clobber the instructions we've just added. The easiest way to
3489 fix this is to just insert another instruction here, so that the
3490 instructions inserted after the last unconditional cleanup are
3491 never the last instruction. */
3492 emit_note (NULL, NOTE_INSN_DELETED);
3493 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3495 if (block_stack
3496 && !(block_stack->data.block.cleanups == NULL_TREE
3497 && block_stack->data.block.outer_cleanups == NULL_TREE))
3498 thisblock->data.block.outer_cleanups
3499 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3500 block_stack->data.block.outer_cleanups);
3501 else
3502 thisblock->data.block.outer_cleanups = 0;
3503 thisblock->data.block.label_chain = 0;
3504 thisblock->data.block.innermost_stack_block = stack_block_stack;
3505 thisblock->data.block.first_insn = note;
3506 thisblock->data.block.block_start_count = ++current_block_start_count;
3507 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3508 block_stack = thisblock;
3509 nesting_stack = thisblock;
3511 /* Make a new level for allocating stack slots. */
3512 push_temp_slots ();
3515 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3516 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3517 expand_expr are made. After we end the region, we know that all
3518 space for all temporaries that were created by TARGET_EXPRs will be
3519 destroyed and their space freed for reuse. */
3521 void
3522 expand_start_target_temps ()
3524 /* This is so that even if the result is preserved, the space
3525 allocated will be freed, as we know that it is no longer in use. */
3526 push_temp_slots ();
3528 /* Start a new binding layer that will keep track of all cleanup
3529 actions to be performed. */
3530 expand_start_bindings (2);
3532 target_temp_slot_level = temp_slot_level;
3535 void
3536 expand_end_target_temps ()
3538 expand_end_bindings (NULL_TREE, 0, 0);
3540 /* This is so that even if the result is preserved, the space
3541 allocated will be freed, as we know that it is no longer in use. */
3542 pop_temp_slots ();
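/* A sketch of the intended pairing:

       expand_start_target_temps ();
       ... expand_expr calls that may create TARGET_EXPR temporaries ...
       expand_end_target_temps ();

   Ending the region expands any cleanups registered inside it and
   pops the temporary slot level, so the temporaries' stack space
   becomes free for reuse.  */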
3545 /* Given a pointer to a BLOCK node return non-zero if (and only if) the node
3546 in question represents the outermost pair of curly braces (i.e. the "body
3547 block") of a function or method.
3549 For any BLOCK node representing a "body block" of a function or method, the
3550 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3551 represents the outermost (function) scope for the function or method (i.e.
3552 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3553 *that* node in turn will point to the relevant FUNCTION_DECL node. */
3555 int
3556 is_body_block (stmt)
3557 tree stmt;
3559 if (TREE_CODE (stmt) == BLOCK)
3561 tree parent = BLOCK_SUPERCONTEXT (stmt);
3563 if (parent && TREE_CODE (parent) == BLOCK)
3565 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3567 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3568 return 1;
3572 return 0;
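/* The shape being tested, as a sketch:

       FUNCTION_DECL
         BLOCK          <-- outermost (function) scope, with the parameters
           BLOCK        <-- the "body block"; is_body_block returns 1

   Any BLOCK nested more deeply has a BLOCK grandparent rather than
   a FUNCTION_DECL, and so fails the test.  */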
3575 /* True if we are currently emitting insns in an area of output code
3576 that is controlled by a conditional expression. This is used by
3577 the cleanup handling code to generate conditional cleanup actions. */
3579 int
3580 conditional_context ()
3582 return block_stack && block_stack->data.block.conditional_code;
3585 /* Return an opaque pointer to the current nesting level, so frontend code
3586 can check its own sanity. */
3588 struct nesting *
3589 current_nesting_level ()
3591 return cfun ? block_stack : 0;
3594 /* Emit a handler label for a nonlocal goto handler.
3595 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3597 static rtx
3598 expand_nl_handler_label (slot, before_insn)
3599 rtx slot, before_insn;
3601 rtx insns;
3602 rtx handler_label = gen_label_rtx ();
3604 /* Don't let cleanup_cfg delete the handler. */
3605 LABEL_PRESERVE_P (handler_label) = 1;
3607 start_sequence ();
3608 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3609 insns = get_insns ();
3610 end_sequence ();
3611 emit_insns_before (insns, before_insn);
3613 emit_label (handler_label);
3615 return handler_label;
3618 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3619 handler. */
3620 static void
3621 expand_nl_goto_receiver ()
3623 #ifdef HAVE_nonlocal_goto
3624 if (! HAVE_nonlocal_goto)
3625 #endif
3626 /* First adjust our frame pointer to its actual value. It was
3627 previously set to the start of the virtual area corresponding to
3628 the stacked variables when we branched here and now needs to be
3629 adjusted to the actual hardware fp value.
3631 Assignments to virtual registers are converted by
3632 instantiate_virtual_regs into the corresponding assignment
3633 to the underlying register (fp in this case) that makes
3634 the original assignment true.
3635 So the following insn will actually be
3636 decrementing fp by STARTING_FRAME_OFFSET. */
3637 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3639 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3640 if (fixed_regs[ARG_POINTER_REGNUM])
3642 #ifdef ELIMINABLE_REGS
3643 /* If the argument pointer can be eliminated in favor of the
3644 frame pointer, we don't need to restore it. We assume here
3645 that if such an elimination is present, it can always be used.
3646 This is the case on all known machines; if we don't make this
3647 assumption, we do unnecessary saving on many machines. */
3648 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3649 size_t i;
3651 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3652 if (elim_regs[i].from == ARG_POINTER_REGNUM
3653 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3654 break;
3656 if (i == ARRAY_SIZE (elim_regs))
3657 #endif
3659 /* Now restore our arg pointer from the address at which it
3660 was saved in our stack frame. */
3661 emit_move_insn (virtual_incoming_args_rtx,
3662 copy_to_reg (get_arg_pointer_save_area (cfun)));
3665 #endif
3667 #ifdef HAVE_nonlocal_goto_receiver
3668 if (HAVE_nonlocal_goto_receiver)
3669 emit_insn (gen_nonlocal_goto_receiver ());
3670 #endif
3673 /* Make handlers for nonlocal gotos taking place in the function calls in
3674 block THISBLOCK. */
3676 static void
3677 expand_nl_goto_receivers (thisblock)
3678 struct nesting *thisblock;
3680 tree link;
3681 rtx afterward = gen_label_rtx ();
3682 rtx insns, slot;
3683 rtx label_list;
3684 int any_invalid;
3686 /* Record the handler address in the stack slot for that purpose,
3687 during this block, saving and restoring the outer value. */
3688 if (thisblock->next != 0)
3689 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3691 rtx save_receiver = gen_reg_rtx (Pmode);
3692 emit_move_insn (XEXP (slot, 0), save_receiver);
3694 start_sequence ();
3695 emit_move_insn (save_receiver, XEXP (slot, 0));
3696 insns = get_insns ();
3697 end_sequence ();
3698 emit_insns_before (insns, thisblock->data.block.first_insn);
3701 /* Jump around the handlers; they run only when specially invoked. */
3702 emit_jump (afterward);
3704 /* Make a separate handler for each label. */
3705 link = nonlocal_labels;
3706 slot = nonlocal_goto_handler_slots;
3707 label_list = NULL_RTX;
3708 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3709 /* Skip any labels we shouldn't be able to jump to from here;
3710 we generate one special handler for all of them below, which just
3711 calls abort. */
3712 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3714 rtx lab;
3715 lab = expand_nl_handler_label (XEXP (slot, 0),
3716 thisblock->data.block.first_insn);
3717 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3719 expand_nl_goto_receiver ();
3721 /* Jump to the "real" nonlocal label. */
3722 expand_goto (TREE_VALUE (link));
3725 /* A second pass over all nonlocal labels; this time we handle those
3726 we should not be able to jump to at this point. */
3727 link = nonlocal_labels;
3728 slot = nonlocal_goto_handler_slots;
3729 any_invalid = 0;
3730 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3731 if (DECL_TOO_LATE (TREE_VALUE (link)))
3733 rtx lab;
3734 lab = expand_nl_handler_label (XEXP (slot, 0),
3735 thisblock->data.block.first_insn);
3736 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3737 any_invalid = 1;
3740 if (any_invalid)
3742 expand_nl_goto_receiver ();
3743 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), LCT_NORETURN,
3744 VOIDmode, 0);
3745 emit_barrier ();
3748 nonlocal_goto_handler_labels = label_list;
3749 emit_label (afterward);
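/* As an illustrative sketch (not part of this file), the GNU C nested
   function below is the kind of input these receivers serve: the jump
   to BAD from HELPER is a nonlocal goto, so F's binding contour must
   expand a handler that restores the frame and argument pointers
   before transferring to the real label.  */
#if 0
extern void g (void (*) (void));

static int
f (void)
{
  __label__ bad;
  void helper (void) { goto bad; }	/* Nonlocal jump out of HELPER.  */

  g (helper);				/* May return via BAD instead.  */
  return 0;
 bad:
  return 1;
}
#endif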
3752 /* Warn about any unused VARS (which may contain nodes other than
3753 VAR_DECLs, but such nodes are ignored). The nodes are connected
3754 via the TREE_CHAIN field. */
3756 void
3757 warn_about_unused_variables (vars)
3758 tree vars;
3760 tree decl;
3762 if (warn_unused_variable)
3763 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3764 if (TREE_CODE (decl) == VAR_DECL
3765 && ! TREE_USED (decl)
3766 && ! DECL_IN_SYSTEM_HEADER (decl)
3767 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3768 warning_with_decl (decl, "unused variable `%s'");
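/* For example (illustrative, not part of this file), compiling

     void f (void) { int y; }

   with -Wunused-variable reaches the warning above for Y, a named,
   non-artificial VAR_DECL that is never marked TREE_USED.  */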
3771 /* Generate RTL code to terminate a binding contour.
3773 VARS is the chain of VAR_DECL nodes for the variables bound in this
3774 contour. There may actually be other nodes in this chain, but any
3775 nodes other than VAR_DECLs are ignored.
3777 MARK_ENDS is nonzero if we should put a note at the beginning
3778 and end of this binding contour.
3780 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3781 (That is true automatically if the contour has a saved stack level.) */
3783 void
3784 expand_end_bindings (vars, mark_ends, dont_jump_in)
3785 tree vars;
3786 int mark_ends;
3787 int dont_jump_in;
3789 struct nesting *thisblock = block_stack;
3791 /* If any of the variables in this scope were not used, warn the
3792 user. */
3793 warn_about_unused_variables (vars);
3795 if (thisblock->exit_label)
3797 do_pending_stack_adjust ();
3798 emit_label (thisblock->exit_label);
3801 /* If necessary, make handlers for nonlocal gotos taking
3802 place in the function calls in this block. */
3803 if (function_call_count != thisblock->data.block.n_function_calls
3804 && nonlocal_labels
3805 /* Make handler for outermost block
3806 if there were any nonlocal gotos to this function. */
3807 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3808 /* Make handler for inner block if it has something
3809 special to do when you jump out of it. */
3810 : (thisblock->data.block.cleanups != 0
3811 || thisblock->data.block.stack_level != 0)))
3812 expand_nl_goto_receivers (thisblock);
3814 /* Don't allow jumping into a block that has a stack level.
3815 Cleanups are allowed, though. */
3816 if (dont_jump_in
3817 || thisblock->data.block.stack_level != 0)
3819 struct label_chain *chain;
3821 /* Any labels in this block are no longer valid to go to.
3822 Mark them to cause an error message. */
3823 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3825 DECL_TOO_LATE (chain->label) = 1;
3826 /* If any goto without a fixup came to this label,
3827 that must be an error, because gotos without fixups
3828 come from outside all saved stack-levels. */
3829 if (TREE_ADDRESSABLE (chain->label))
3830 error_with_decl (chain->label,
3831 "label `%s' used before containing binding contour");
3835 /* Restore stack level in effect before the block
3836 (only if variable-size objects allocated). */
3837 /* Perform any cleanups associated with the block. */
3839 if (thisblock->data.block.stack_level != 0
3840 || thisblock->data.block.cleanups != 0)
3842 int reachable;
3843 rtx insn;
3845 /* Don't let cleanups affect ({...}) constructs. */
3846 int old_expr_stmts_for_value = expr_stmts_for_value;
3847 rtx old_last_expr_value = last_expr_value;
3848 tree old_last_expr_type = last_expr_type;
3849 expr_stmts_for_value = 0;
3851 /* Only clean up here if this point can actually be reached. */
3852 insn = get_last_insn ();
3853 if (GET_CODE (insn) == NOTE)
3854 insn = prev_nonnote_insn (insn);
3855 reachable = (! insn || GET_CODE (insn) != BARRIER);
3857 /* Do the cleanups. */
3858 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3859 if (reachable)
3860 do_pending_stack_adjust ();
3862 expr_stmts_for_value = old_expr_stmts_for_value;
3863 last_expr_value = old_last_expr_value;
3864 last_expr_type = old_last_expr_type;
3866 /* Restore the stack level. */
3868 if (reachable && thisblock->data.block.stack_level != 0)
3870 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3871 thisblock->data.block.stack_level, NULL_RTX);
3872 if (nonlocal_goto_handler_slots != 0)
3873 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3874 NULL_RTX);
3877 /* Any gotos out of this block must also do these things.
3878 Also report any gotos with fixups that came to labels in this
3879 level. */
3880 fixup_gotos (thisblock,
3881 thisblock->data.block.stack_level,
3882 thisblock->data.block.cleanups,
3883 thisblock->data.block.first_insn,
3884 dont_jump_in);
3887 /* Mark the beginning and end of the scope if requested.
3888 We do this now, after running cleanups on the variables
3889 just going out of scope, so they are in scope for their cleanups. */
3891 if (mark_ends)
3893 rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
3894 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3896 else
3897 /* Get rid of the beginning-mark if we don't make an end-mark. */
3898 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3900 /* Restore the temporary level of TARGET_EXPRs. */
3901 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3903 /* Restore block_stack level for containing block. */
3905 stack_block_stack = thisblock->data.block.innermost_stack_block;
3906 POPSTACK (block_stack);
3908 /* Pop the stack slot nesting and free any slots at this level. */
3909 pop_temp_slots ();
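/* An illustrative sketch (not part of this file) of how a front end
   brackets a `{ ... }' scope with this function, assuming the matching
   expand_start_bindings entry point declared elsewhere in the
   compiler:  */
#if 0
static void
expand_compound_stmt_sketch (block)
     tree block;
{
  expand_start_bindings (0);		/* Open the binding contour.  */
  /* ... expand the block's declarations and statements here ...  */
  expand_end_bindings (BLOCK_VARS (block),	/* Warn about unused vars,  */
		       1,			/* emit scope begin/end notes,  */
		       0);			/* and permit jumps into it.  */
}
#endif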
3912 /* Generate code to save the stack pointer at the start of the current block
3913 and set up to restore it on exit. */
3915 void
3916 save_stack_pointer ()
3918 struct nesting *thisblock = block_stack;
3920 if (thisblock->data.block.stack_level == 0)
3922 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3923 &thisblock->data.block.stack_level,
3924 thisblock->data.block.first_insn);
3925 stack_block_stack = thisblock;
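/* For example (illustrative, not part of this file), a block such as

     { int n = g (); char buf[n]; ... }

   allocates BUF dynamically, so expand_decl calls save_stack_pointer
   to record the stack pointer at block entry; expand_end_bindings then
   restores it when the block exits, reclaiming BUF's space.  */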
3929 /* Generate RTL for the automatic variable declaration DECL.
3930 (Other kinds of declarations are simply ignored if seen here.) */
3932 void
3933 expand_decl (decl)
3934 tree decl;
3936 struct nesting *thisblock;
3937 tree type;
3939 type = TREE_TYPE (decl);
3941 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3942 type in case this node is used in a reference. */
3943 if (TREE_CODE (decl) == CONST_DECL)
3945 DECL_MODE (decl) = TYPE_MODE (type);
3946 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3947 DECL_SIZE (decl) = TYPE_SIZE (type);
3948 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3949 return;
3952 /* Otherwise, only automatic variables need any expansion done. Static and
3953 external variables, and external functions, will be handled by
3954 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3955 nothing. PARM_DECLs are handled in `assign_parms'. */
3956 if (TREE_CODE (decl) != VAR_DECL)
3957 return;
3959 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3960 return;
3962 thisblock = block_stack;
3964 /* Create the RTL representation for the variable. */
3966 if (type == error_mark_node)
3967 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3969 else if (DECL_SIZE (decl) == 0)
3970 /* Variable with incomplete type. */
3972 rtx x;
3973 if (DECL_INITIAL (decl) == 0)
3974 /* Error message was already done; now avoid a crash. */
3975 x = gen_rtx_MEM (BLKmode, const0_rtx);
3976 else
3977 /* An initializer is going to decide the size of this array.
3978 Until we know the size, represent its address with a reg. */
3979 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3981 set_mem_attributes (x, decl, 1);
3982 SET_DECL_RTL (decl, x);
3984 else if (DECL_MODE (decl) != BLKmode
3985 /* If -ffloat-store, don't put explicit float vars
3986 into regs. */
3987 && !(flag_float_store
3988 && TREE_CODE (type) == REAL_TYPE)
3989 && ! TREE_THIS_VOLATILE (decl)
3990 && (DECL_REGISTER (decl) || optimize)
3991 /* If -fcheck-memory-usage, check all variables. */
3992 && ! current_function_check_memory_usage)
3994 /* Automatic variable that can go in a register. */
3995 int unsignedp = TREE_UNSIGNED (type);
3996 enum machine_mode reg_mode
3997 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3999 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
4001 if (GET_CODE (DECL_RTL (decl)) == REG)
4002 REGNO_DECL (REGNO (DECL_RTL (decl))) = decl;
4003 else if (GET_CODE (DECL_RTL (decl)) == CONCAT)
4005 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 0))) = decl;
4006 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 1))) = decl;
4009 mark_user_reg (DECL_RTL (decl));
4011 if (POINTER_TYPE_P (type))
4012 mark_reg_pointer (DECL_RTL (decl),
4013 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
4015 maybe_set_unchanging (DECL_RTL (decl), decl);
4017 /* If something wants our address, try to use ADDRESSOF. */
4018 if (TREE_ADDRESSABLE (decl))
4019 put_var_into_stack (decl);
4022 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
4023 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
4024 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
4025 STACK_CHECK_MAX_VAR_SIZE)))
4027 /* Variable of fixed size that goes on the stack. */
4028 rtx oldaddr = 0;
4029 rtx addr;
4030 rtx x;
4032 /* If we previously made RTL for this decl, it must be an array
4033 whose size was determined by the initializer.
4034 The old address was a register; set that register now
4035 to the proper address. */
4036 if (DECL_RTL_SET_P (decl))
4038 if (GET_CODE (DECL_RTL (decl)) != MEM
4039 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
4040 abort ();
4041 oldaddr = XEXP (DECL_RTL (decl), 0);
4044 /* Set alignment we actually gave this decl. */
4045 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
4046 : GET_MODE_BITSIZE (DECL_MODE (decl)));
4047 DECL_USER_ALIGN (decl) = 0;
4049 x = assign_temp (TREE_TYPE (decl), 1, 1, 1);
4050 set_mem_attributes (x, decl, 1);
4051 SET_DECL_RTL (decl, x);
4053 if (oldaddr)
4055 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
4056 if (addr != oldaddr)
4057 emit_move_insn (oldaddr, addr);
4060 else
4061 /* Dynamic-size object: must push space on the stack. */
4063 rtx address, size, x;
4065 /* Record the stack pointer on entry to block, if we have
4066 not already done so. */
4067 do_pending_stack_adjust ();
4068 save_stack_pointer ();
4070 /* In function-at-a-time mode, variable_size doesn't expand this,
4071 so do it now. */
4072 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
4073 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
4074 const0_rtx, VOIDmode, 0);
4076 /* Compute the variable's size, in bytes. */
4077 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
4078 free_temp_slots ();
4080 /* Allocate space on the stack for the variable. Note that
4081 DECL_ALIGN says how the variable is to be aligned and we
4082 cannot use it to conclude anything about the alignment of
4083 the size. */
4084 address = allocate_dynamic_stack_space (size, NULL_RTX,
4085 TYPE_ALIGN (TREE_TYPE (decl)));
4087 /* Reference the variable indirectly through that rtx. */
4088 x = gen_rtx_MEM (DECL_MODE (decl), address);
4089 set_mem_attributes (x, decl, 1);
4090 SET_DECL_RTL (decl, x);
4093 /* Indicate the alignment we actually gave this variable. */
4094 #ifdef STACK_BOUNDARY
4095 DECL_ALIGN (decl) = STACK_BOUNDARY;
4096 #else
4097 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
4098 #endif
4099 DECL_USER_ALIGN (decl) = 0;
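/* Illustrative examples (not part of this file) of the main cases
   above, assuming the function is compiled with optimization:

     int i;          fixed size, scalar mode: I is given a pseudo REG;
     char buf[16];   fixed size, BLKmode: BUF is given a stack slot (MEM);
     char vla[n];    dynamic size: space is pushed on the stack at run
                     time and VLA is a MEM at the returned address.  */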
4103 /* Emit code to perform the initialization of a declaration DECL. */
4105 void
4106 expand_decl_init (decl)
4107 tree decl;
4109 int was_used = TREE_USED (decl);
4111 /* If this is a CONST_DECL, we don't have to generate any code, but
4112 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
4113 to be set while in the obstack containing the constant. If we don't
4114 do this, we can lose if we have functions nested three deep and the middle
4115 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
4116 the innermost function is the first to expand that STRING_CST. */
4117 if (TREE_CODE (decl) == CONST_DECL)
4119 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
4120 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
4121 EXPAND_INITIALIZER);
4122 return;
4125 if (TREE_STATIC (decl))
4126 return;
4128 /* Compute and store the initial value now. */
4130 if (DECL_INITIAL (decl) == error_mark_node)
4132 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
4134 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
4135 || code == POINTER_TYPE || code == REFERENCE_TYPE)
4136 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4137 0, 0);
4138 emit_queue ();
4140 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4142 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
4143 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
4144 emit_queue ();
4147 /* Don't let the initialization count as "using" the variable. */
4148 TREE_USED (decl) = was_used;
4150 /* Free any temporaries we made while initializing the decl. */
4151 preserve_temp_slots (NULL_RTX);
4152 free_temp_slots ();
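/* For example (illustrative, not part of this file), for

     { int i = x + 1; ... }

   the front end calls expand_decl (i_decl) and then
   expand_decl_init (i_decl), which emits a line note and expands the
   assignment `i = x + 1' without marking I as used.  */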
4155 /* CLEANUP is an expression to be executed at exit from this binding contour;
4156 for example, in C++, it might call the destructor for this variable.
4158 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4159 CLEANUP multiple times, and have the correct semantics. This
4160 happens in exception handling, and for gotos, returns, and breaks that
4161 leave the current scope.
4163 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4164 that is not associated with any particular variable. */
4166 int
4167 expand_decl_cleanup (decl, cleanup)
4168 tree decl, cleanup;
4170 struct nesting *thisblock;
4172 /* Error if we are not in any block. */
4173 if (cfun == 0 || block_stack == 0)
4174 return 0;
4176 thisblock = block_stack;
4178 /* Record the cleanup if there is one. */
4180 if (cleanup != 0)
4182 tree t;
4183 rtx seq;
4184 tree *cleanups = &thisblock->data.block.cleanups;
4185 int cond_context = conditional_context ();
4187 if (cond_context)
4189 rtx flag = gen_reg_rtx (word_mode);
4190 rtx set_flag_0;
4191 tree cond;
4193 start_sequence ();
4194 emit_move_insn (flag, const0_rtx);
4195 set_flag_0 = get_insns ();
4196 end_sequence ();
4198 thisblock->data.block.last_unconditional_cleanup
4199 = emit_insns_after (set_flag_0,
4200 thisblock->data.block.last_unconditional_cleanup);
4202 emit_move_insn (flag, const1_rtx);
4204 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
4205 SET_DECL_RTL (cond, flag);
4207 /* Conditionalize the cleanup. */
4208 cleanup = build (COND_EXPR, void_type_node,
4209 truthvalue_conversion (cond),
4210 cleanup, integer_zero_node);
4211 cleanup = fold (cleanup);
4213 cleanups = thisblock->data.block.cleanup_ptr;
4216 cleanup = unsave_expr (cleanup);
4218 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4220 if (! cond_context)
4221 /* If this block has a cleanup, it belongs in stack_block_stack. */
4222 stack_block_stack = thisblock;
4224 if (cond_context)
4226 start_sequence ();
4229 if (! using_eh_for_cleanups_p)
4230 TREE_ADDRESSABLE (t) = 1;
4231 else
4232 expand_eh_region_start ();
4234 if (cond_context)
4236 seq = get_insns ();
4237 end_sequence ();
4238 if (seq)
4239 thisblock->data.block.last_unconditional_cleanup
4240 = emit_insns_after (seq,
4241 thisblock->data.block.last_unconditional_cleanup);
4243 else
4245 thisblock->data.block.last_unconditional_cleanup
4246 = get_last_insn ();
4247 /* When we insert instructions after the last unconditional cleanup,
4248 we don't adjust last_insn. That means that a later add_insn will
4249 clobber the instructions we've just added. The easiest way to
4250 fix this is to just insert another instruction here, so that the
4251 instructions inserted after the last unconditional cleanup are
4252 never the last instruction. */
4253 emit_note (NULL, NOTE_INSN_DELETED);
4254 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4257 return 1;
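/* For example (illustrative, not part of this file), for the C++ code

     { String s; ... }

   the front end registers S's destructor call as a cleanup:

     expand_decl_cleanup (s_decl, dtor_call);

   where DTOR_CALL stands for whatever CALL_EXPR tree the front end
   builds; the cleanup is then expanded on every path out of the
   contour, and conditionalized if registered in conditional code.  */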
4260 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4261 DECL_ELTS is the list of elements that belong to DECL's type.
4262 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4264 void
4265 expand_anon_union_decl (decl, cleanup, decl_elts)
4266 tree decl, cleanup, decl_elts;
4268 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4269 rtx x;
4270 tree t;
4272 /* If any of the elements are addressable, so is the entire union. */
4273 for (t = decl_elts; t; t = TREE_CHAIN (t))
4274 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4276 TREE_ADDRESSABLE (decl) = 1;
4277 break;
4280 expand_decl (decl);
4281 expand_decl_cleanup (decl, cleanup);
4282 x = DECL_RTL (decl);
4284 /* Go through the elements, assigning RTL to each. */
4285 for (t = decl_elts; t; t = TREE_CHAIN (t))
4287 tree decl_elt = TREE_VALUE (t);
4288 tree cleanup_elt = TREE_PURPOSE (t);
4289 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4291 /* Propagate the union's alignment to the elements. */
4292 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4293 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4295 /* If the element has BLKmode and the union doesn't, the union is
4296 aligned such that the element doesn't need to have BLKmode, so
4297 change the element's mode to the appropriate one for its size. */
4298 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4299 DECL_MODE (decl_elt) = mode
4300 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4302 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4303 instead create a new MEM rtx with the proper mode. */
4304 if (GET_CODE (x) == MEM)
4306 if (mode == GET_MODE (x))
4307 SET_DECL_RTL (decl_elt, x);
4308 else
4309 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4311 else if (GET_CODE (x) == REG)
4313 if (mode == GET_MODE (x))
4314 SET_DECL_RTL (decl_elt, x);
4315 else
4316 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4318 else
4319 abort ();
4321 /* Record the cleanup if there is one. */
4323 if (cleanup != 0)
4324 thisblock->data.block.cleanups
4325 = tree_cons (decl_elt, cleanup_elt,
4326 thisblock->data.block.cleanups);
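/* For example (illustrative, not part of this file), for the C++
   anonymous union

     union { int i; float f; };

   the front end builds one artificial VAR_DECL for the union itself
   and passes `i' and `f' in DECL_ELTS; each element then aliases the
   union's storage, either as a MEM in the proper mode or as a lowpart
   SUBREG of the union's register.  */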
4330 /* Expand a list of cleanups LIST.
4331 Elements may be expressions or may be nested lists.
4333 If DONT_DO is nonnull, then any list-element
4334 whose TREE_PURPOSE matches DONT_DO is omitted.
4335 This is sometimes used to avoid a cleanup associated with
4336 a value that is being returned out of the scope.
4338 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4339 goto and handle protection regions specially in that case.
4341 If REACHABLE, we emit code, otherwise just inform the exception handling
4342 code about this finalization. */
4344 static void
4345 expand_cleanups (list, dont_do, in_fixup, reachable)
4346 tree list;
4347 tree dont_do;
4348 int in_fixup;
4349 int reachable;
4351 tree tail;
4352 for (tail = list; tail; tail = TREE_CHAIN (tail))
4353 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4355 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4356 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4357 else
4359 if (! in_fixup && using_eh_for_cleanups_p)
4360 expand_eh_region_end_cleanup (TREE_VALUE (tail));
4362 if (reachable)
4364 /* Cleanups may be run multiple times. For example,
4365 when exiting a binding contour, we expand the
4366 cleanups associated with that contour. When a goto
4367 within that binding contour has a target outside that
4368 contour, it will expand all cleanups from its scope to
4369 the target. Though the cleanups are expanded multiple
4370 times, the control paths are non-overlapping so the
4371 cleanups will not be executed twice. */
4373 /* We may need to protect from outer cleanups. */
4374 if (in_fixup && using_eh_for_cleanups_p)
4376 expand_eh_region_start ();
4378 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4380 expand_eh_region_end_fixup (TREE_VALUE (tail));
4382 else
4383 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4385 free_temp_slots ();
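/* For example (illustrative, not part of this file), when a `return'
   leaves a contour, the cleanup of the decl being returned can be
   suppressed by passing that decl as DONT_DO:

     expand_cleanups (block->data.block.cleanups, retval_decl, 0, 1);

   Every list element whose TREE_PURPOSE is RETVAL_DECL is skipped.  */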
4391 /* Mark the context we are emitting RTL in as a conditional
4392 context, so that any cleanup actions we register with
4393 expand_decl_cleanup will be properly conditionalized when those
4394 cleanup actions are later performed. Must be called before any
4395 expression (tree) is expanded that is within a conditional context. */
4397 void
4398 start_cleanup_deferral ()
4400 /* block_stack can be NULL if we are inside the parameter list. It is
4401 OK to do nothing, because cleanups aren't possible here. */
4402 if (block_stack)
4403 ++block_stack->data.block.conditional_code;
4406 /* Mark the end of a conditional region of code. Because cleanup
4407 deferrals may be nested, we may still be in a conditional region
4408 after we end the currently deferred cleanups; only after we end all
4409 deferred cleanups are we back in unconditional code.
4411 void
4412 end_cleanup_deferral ()
4414 /* block_stack can be NULL if we are inside the parameter list. It is
4415 OK to do nothing, because cleanups aren't possible here. */
4416 if (block_stack)
4417 --block_stack->data.block.conditional_code;
4420 /* Move all cleanups from the current block_stack
4421 to the containing block_stack, where they are assumed to
4422 have been created. If anything can cause a temporary to
4423 be created, but not expanded for more than one level of
4424 block_stacks, then this code will have to change. */
4426 void
4427 move_cleanups_up ()
4429 struct nesting *block = block_stack;
4430 struct nesting *outer = block->next;
4432 outer->data.block.cleanups
4433 = chainon (block->data.block.cleanups,
4434 outer->data.block.cleanups);
4435 block->data.block.cleanups = 0;
4438 tree
4439 last_cleanup_this_contour ()
4441 if (block_stack == 0)
4442 return 0;
4444 return block_stack->data.block.cleanups;
4447 /* Return 1 if there are any pending cleanups at this point.
4448 If THIS_CONTOUR is nonzero, check the current contour as well.
4449 Otherwise, look only at the contours that enclose this one. */
4451 int
4452 any_pending_cleanups (this_contour)
4453 int this_contour;
4455 struct nesting *block;
4457 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4458 return 0;
4460 if (this_contour && block_stack->data.block.cleanups != NULL)
4461 return 1;
4462 if (block_stack->data.block.cleanups == 0
4463 && block_stack->data.block.outer_cleanups == 0)
4464 return 0;
4466 for (block = block_stack->next; block; block = block->next)
4467 if (block->data.block.cleanups != 0)
4468 return 1;
4470 return 0;
4473 /* Enter a case (Pascal) or switch (C) statement.
4474 Push a block onto case_stack and nesting_stack
4475 to accumulate the case-labels that are seen
4476 and to record the labels generated for the statement.
4478 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4479 Otherwise, this construct is transparent for `exit_something'.
4481 EXPR is the index-expression to be dispatched on.
4482 TYPE is its nominal type. We could simply convert EXPR to this type,
4483 but instead we take short cuts. */
4485 void
4486 expand_start_case (exit_flag, expr, type, printname)
4487 int exit_flag;
4488 tree expr;
4489 tree type;
4490 const char *printname;
4492 struct nesting *thiscase = ALLOC_NESTING ();
4494 /* Make an entry on case_stack for the case we are entering. */
4496 thiscase->next = case_stack;
4497 thiscase->all = nesting_stack;
4498 thiscase->depth = ++nesting_depth;
4499 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4500 thiscase->data.case_stmt.case_list = 0;
4501 thiscase->data.case_stmt.index_expr = expr;
4502 thiscase->data.case_stmt.nominal_type = type;
4503 thiscase->data.case_stmt.default_label = 0;
4504 thiscase->data.case_stmt.printname = printname;
4505 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4506 case_stack = thiscase;
4507 nesting_stack = thiscase;
4509 do_pending_stack_adjust ();
4511 /* Make sure case_stmt.start points to something that won't
4512 need any transformation before expand_end_case. */
4513 if (GET_CODE (get_last_insn ()) != NOTE)
4514 emit_note (NULL, NOTE_INSN_DELETED);
4516 thiscase->data.case_stmt.start = get_last_insn ();
4518 start_cleanup_deferral ();
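/* An illustrative sketch (not part of this file) of the call sequence
   a C-like front end uses for `switch (expr) { case 1: ... default: ... }',
   assuming `convert' as the CONVERTER callback:  */
#if 0
static void
expand_switch_sketch (expr, label_1, default_label)
     tree expr, label_1, default_label;
{
  tree duplicate;

  expand_start_case (1, expr, TREE_TYPE (expr), "switch statement");
  /* ... at each label's position within the expanded body: ...  */
  pushcase (build_int_2 (1, 0), convert, label_1, &duplicate);
  pushcase (NULL_TREE, convert, default_label, &duplicate);
  /* ... once the body has been expanded: ...  */
  expand_end_case (expr);
}
#endif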
4521 /* Start a "dummy case statement" within which case labels are invalid
4522 and are not connected to any larger real case statement.
4523 This can be used if you don't want to let a case statement jump
4524 into the middle of certain kinds of constructs. */
4526 void
4527 expand_start_case_dummy ()
4529 struct nesting *thiscase = ALLOC_NESTING ();
4531 /* Make an entry on case_stack for the dummy. */
4533 thiscase->next = case_stack;
4534 thiscase->all = nesting_stack;
4535 thiscase->depth = ++nesting_depth;
4536 thiscase->exit_label = 0;
4537 thiscase->data.case_stmt.case_list = 0;
4538 thiscase->data.case_stmt.start = 0;
4539 thiscase->data.case_stmt.nominal_type = 0;
4540 thiscase->data.case_stmt.default_label = 0;
4541 case_stack = thiscase;
4542 nesting_stack = thiscase;
4543 start_cleanup_deferral ();
4546 /* End a dummy case statement. */
4548 void
4549 expand_end_case_dummy ()
4551 end_cleanup_deferral ();
4552 POPSTACK (case_stack);
4555 /* Return the data type of the index-expression
4556 of the innermost case statement, or null if none. */
4558 tree
4559 case_index_expr_type ()
4561 if (case_stack)
4562 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4563 return 0;
4566 static void
4567 check_seenlabel ()
4569 /* If this is the first label, warn if any insns have been emitted. */
4570 if (case_stack->data.case_stmt.line_number_status >= 0)
4572 rtx insn;
4574 restore_line_number_status
4575 (case_stack->data.case_stmt.line_number_status);
4576 case_stack->data.case_stmt.line_number_status = -1;
4578 for (insn = case_stack->data.case_stmt.start;
4579 insn;
4580 insn = NEXT_INSN (insn))
4582 if (GET_CODE (insn) == CODE_LABEL)
4583 break;
4584 if (GET_CODE (insn) != NOTE
4585 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4587 do
4588 insn = PREV_INSN (insn);
4589 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4591 /* If insn is zero, then there must have been a syntax error. */
4592 if (insn)
4593 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4594 NOTE_LINE_NUMBER (insn),
4595 "unreachable code at beginning of %s",
4596 case_stack->data.case_stmt.printname);
4597 break;
4603 /* Accumulate one case or default label inside a case or switch statement.
4604 VALUE is the value of the case (a null pointer, for a default label).
4605 The function CONVERTER, when applied to arguments T and V,
4606 converts the value V to the type T.
4608 If not currently inside a case or switch statement, return 1 and do
4609 nothing. The caller will print a language-specific error message.
4610 If VALUE is a duplicate or overlaps, return 2 and do nothing
4611 except store the (first) duplicate node in *DUPLICATE.
4612 If VALUE is out of range, return 3 and do nothing.
4613 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4614 Return 0 on success.
4616 Extended to handle range statements. */
4618 int
4619 pushcase (value, converter, label, duplicate)
4620 tree value;
4621 tree (*converter) PARAMS ((tree, tree));
4622 tree label;
4623 tree *duplicate;
4625 tree index_type;
4626 tree nominal_type;
4628 /* Fail if not inside a real case statement. */
4629 if (! (case_stack && case_stack->data.case_stmt.start))
4630 return 1;
4632 if (stack_block_stack
4633 && stack_block_stack->depth > case_stack->depth)
4634 return 5;
4636 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4637 nominal_type = case_stack->data.case_stmt.nominal_type;
4639 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4640 if (index_type == error_mark_node)
4641 return 0;
4643 /* Convert VALUE to the type in which the comparisons are nominally done. */
4644 if (value != 0)
4645 value = (*converter) (nominal_type, value);
4647 check_seenlabel ();
4649 /* Fail if this value is out of range for the actual type of the index
4650 (which may be narrower than NOMINAL_TYPE). */
4651 if (value != 0
4652 && (TREE_CONSTANT_OVERFLOW (value)
4653 || ! int_fits_type_p (value, index_type)))
4654 return 3;
4656 return add_case_node (value, value, label, duplicate);
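/* An illustrative sketch (not part of this file) of a front end
   checking the return codes documented above:  */
#if 0
static void
report_pushcase_failure (value, label)
     tree value, label;
{
  tree duplicate;

  switch (pushcase (value, convert, label, &duplicate))
    {
    case 1:
      error ("case label not within a switch statement");
      break;
    case 2:
      error_with_decl (duplicate, "duplicate case value");
      break;
    case 3:
      error ("case value out of range");
      break;
    case 5:
      error ("case label within scope of cleanup or variable array");
      break;
    }
}
#endif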
4659 /* Like pushcase but this case applies to all values between VALUE1 and
4660 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4661 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4662 starts at VALUE1 and ends at the highest value of the index type.
4663 If both are NULL, this case applies to all values.
4665 The return value is the same as that of pushcase but there is one
4666 additional error code: 4 means the specified range was empty. */
4668 int
4669 pushcase_range (value1, value2, converter, label, duplicate)
4670 tree value1, value2;
4671 tree (*converter) PARAMS ((tree, tree));
4672 tree label;
4673 tree *duplicate;
4675 tree index_type;
4676 tree nominal_type;
4678 /* Fail if not inside a real case statement. */
4679 if (! (case_stack && case_stack->data.case_stmt.start))
4680 return 1;
4682 if (stack_block_stack
4683 && stack_block_stack->depth > case_stack->depth)
4684 return 5;
4686 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4687 nominal_type = case_stack->data.case_stmt.nominal_type;
4689 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4690 if (index_type == error_mark_node)
4691 return 0;
4693 check_seenlabel ();
4695 /* Convert VALUEs to type in which the comparisons are nominally done
4696 and replace any unspecified value with the corresponding bound. */
4697 if (value1 == 0)
4698 value1 = TYPE_MIN_VALUE (index_type);
4699 if (value2 == 0)
4700 value2 = TYPE_MAX_VALUE (index_type);
4702 /* Fail if the range is empty. Do this before any conversion since
4703 we want to allow out-of-range empty ranges. */
4704 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4705 return 4;
4707 /* If the max was unbounded, use the max of the nominal_type we are
4708 converting to. Do this after the < check above to suppress false
4709 positives. */
4710 if (value2 == 0)
4711 value2 = TYPE_MAX_VALUE (nominal_type);
4713 value1 = (*converter) (nominal_type, value1);
4714 value2 = (*converter) (nominal_type, value2);
4716 /* Fail if these values are out of range. */
4717 if (TREE_CONSTANT_OVERFLOW (value1)
4718 || ! int_fits_type_p (value1, index_type))
4719 return 3;
4721 if (TREE_CONSTANT_OVERFLOW (value2)
4722 || ! int_fits_type_p (value2, index_type))
4723 return 3;
4725 return add_case_node (value1, value2, label, duplicate);
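/* For example (illustrative, not part of this file), the GNU C case
   range `case 1 ... 3:' reaches this function as

     pushcase_range (build_int_2 (1, 0), build_int_2 (3, 0),
                     convert, label, &duplicate);

   while an empty range such as `case 3 ... 1:' makes it return 4.  */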
4728 /* Do the actual insertion of a case label for pushcase and pushcase_range
4729 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4730 slowdown for large switch statements. */
4732 static int
4733 add_case_node (low, high, label, duplicate)
4734 tree low, high;
4735 tree label;
4736 tree *duplicate;
4738 struct case_node *p, **q, *r;
4740 /* If there's no HIGH value, then this is not a case range; it's
4741 just a simple case label. But that's just a degenerate case
4742 range. */
4743 if (!high)
4744 high = low;
4746 /* Handle default labels specially. */
4747 if (!high && !low)
4749 if (case_stack->data.case_stmt.default_label != 0)
4751 *duplicate = case_stack->data.case_stmt.default_label;
4752 return 2;
4754 case_stack->data.case_stmt.default_label = label;
4755 expand_label (label);
4756 return 0;
4759 q = &case_stack->data.case_stmt.case_list;
4760 p = *q;
4762 while ((r = *q))
4764 p = r;
4766 /* Keep going past elements distinctly greater than HIGH. */
4767 if (tree_int_cst_lt (high, p->low))
4768 q = &p->left;
4770 /* or distinctly less than LOW. */
4771 else if (tree_int_cst_lt (p->high, low))
4772 q = &p->right;
4774 else
4776 /* We have an overlap; this is an error. */
4777 *duplicate = p->code_label;
4778 return 2;
4782 /* Add this label to the chain, and succeed. */
4784 r = (struct case_node *) xmalloc (sizeof (struct case_node));
4785 r->low = low;
4787 /* If the bounds are equal, turn this into the one-value case. */
4788 if (tree_int_cst_equal (low, high))
4789 r->high = r->low;
4790 else
4791 r->high = high;
4793 r->code_label = label;
4794 expand_label (label);
4796 *q = r;
4797 r->parent = p;
4798 r->left = 0;
4799 r->right = 0;
4800 r->balance = 0;
4802 while (p)
4804 struct case_node *s;
4806 if (r == p->left)
4808 int b;
4810 if (! (b = p->balance))
4811 /* Growth propagation from left side. */
4812 p->balance = -1;
4813 else if (b < 0)
4815 if (r->balance < 0)
4817 /* R-Rotation */
4818 if ((p->left = s = r->right))
4819 s->parent = p;
4821 r->right = p;
4822 p->balance = 0;
4823 r->balance = 0;
4824 s = p->parent;
4825 p->parent = r;
4827 if ((r->parent = s))
4829 if (s->left == p)
4830 s->left = r;
4831 else
4832 s->right = r;
4834 else
4835 case_stack->data.case_stmt.case_list = r;
4837 else
4838 /* r->balance == +1 */
4840 /* LR-Rotation */
4842 int b2;
4843 struct case_node *t = r->right;
4845 if ((p->left = s = t->right))
4846 s->parent = p;
4848 t->right = p;
4849 if ((r->right = s = t->left))
4850 s->parent = r;
4852 t->left = r;
4853 b = t->balance;
4854 b2 = b < 0;
4855 p->balance = b2;
4856 b2 = -b2 - b;
4857 r->balance = b2;
4858 t->balance = 0;
4859 s = p->parent;
4860 p->parent = t;
4861 r->parent = t;
4863 if ((t->parent = s))
4865 if (s->left == p)
4866 s->left = t;
4867 else
4868 s->right = t;
4870 else
4871 case_stack->data.case_stmt.case_list = t;
4873 break;
4876 else
4878 /* p->balance == +1; growth of left side balances the node. */
4879 p->balance = 0;
4880 break;
4883 else
4884 /* r == p->right */
4886 int b;
4888 if (! (b = p->balance))
4889 /* Growth propagation from right side. */
4890 p->balance++;
4891 else if (b > 0)
4893 if (r->balance > 0)
4895 /* L-Rotation */
4897 if ((p->right = s = r->left))
4898 s->parent = p;
4900 r->left = p;
4901 p->balance = 0;
4902 r->balance = 0;
4903 s = p->parent;
4904 p->parent = r;
4905 if ((r->parent = s))
4907 if (s->left == p)
4908 s->left = r;
4909 else
4910 s->right = r;
4913 else
4914 case_stack->data.case_stmt.case_list = r;
4917 else
4918 /* r->balance == -1 */
4920 /* RL-Rotation */
4921 int b2;
4922 struct case_node *t = r->left;
4924 if ((p->right = s = t->left))
4925 s->parent = p;
4927 t->left = p;
4929 if ((r->left = s = t->right))
4930 s->parent = r;
4932 t->right = r;
4933 b = t->balance;
4934 b2 = b < 0;
4935 r->balance = b2;
4936 b2 = -b2 - b;
4937 p->balance = b2;
4938 t->balance = 0;
4939 s = p->parent;
4940 p->parent = t;
4941 r->parent = t;
4943 if ((t->parent = s))
4945 if (s->left == p)
4946 s->left = t;
4947 else
4948 s->right = t;
4951 else
4952 case_stack->data.case_stmt.case_list = t;
4954 break;
4956 else
4958 /* p->balance == -1; growth of right side balances the node. */
4959 p->balance = 0;
4960 break;
4964 r = p;
4965 p = p->parent;
4968 return 0;
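/* For reference (an illustrative diagram, not part of this file), the
   R-Rotation above turns the left-heavy subtree on the left into the
   balanced one on the right; the LR- and RL-Rotations are the
   analogous double rotations:

            p                  r
           / \                / \
          r   c     ==>      a   p
         / \                    / \
        a   s                  s   c
*/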
4971 /* Returns the number of possible values of TYPE.
4972 Returns -1 if the number is unknown, variable, or if the number does not
4973 fit in a HOST_WIDE_INT.
4974 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4975 do not increase monotonically (there may be duplicates);
4976 to 1 if the values increase monotonically, but not always by 1;
4977 otherwise sets it to 0. */
4979 HOST_WIDE_INT
4980 all_cases_count (type, spareness)
4981 tree type;
4982 int *spareness;
4984 tree t;
4985 HOST_WIDE_INT count, minval, lastval;
4987 *spareness = 0;
4989 switch (TREE_CODE (type))
4991 case BOOLEAN_TYPE:
4992 count = 2;
4993 break;
4995 case CHAR_TYPE:
4996 count = 1 << BITS_PER_UNIT;
4997 break;
4999 default:
5000 case INTEGER_TYPE:
5001 if (TYPE_MAX_VALUE (type) != 0
5002 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
5003 TYPE_MIN_VALUE (type))))
5004 && 0 != (t = fold (build (PLUS_EXPR, type, t,
5005 convert (type, integer_zero_node))))
5006 && host_integerp (t, 1))
5007 count = tree_low_cst (t, 1);
5008 else
5009 return -1;
5010 break;
5012 case ENUMERAL_TYPE:
5013 /* Don't waste time with enumeral types with huge values. */
5014 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
5015 || TYPE_MAX_VALUE (type) == 0
5016 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
5017 return -1;
5019 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
5020 count = 0;
5022 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
5024 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
5026 if (*spareness == 2 || thisval < lastval)
5027 *spareness = 2;
5028 else if (thisval != minval + count)
5029 *spareness = 1;
5030 lastval = thisval;
5031 count++;
5035 return count;
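/* For example (illustrative, not part of this file):

     enum a { A0, A1, A2 };        returns 3, sets *SPARENESS to 0;
     enum b { B0 = 0, B2 = 2 };    returns 2, sets *SPARENESS to 1;
     enum c { C1 = 1, C0 = 0 };    returns 2, sets *SPARENESS to 2.  */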
5038 #define BITARRAY_TEST(ARRAY, INDEX) \
5039 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5040 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
5041 #define BITARRAY_SET(ARRAY, INDEX) \
5042 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5043 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
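/* A minimal usage sketch (illustrative, not part of this file):

     unsigned char seen[8];
     memset (seen, 0, sizeof seen);
     BITARRAY_SET (seen, 42);
     if (BITARRAY_TEST (seen, 42))   ...now nonzero...

   Each macro locates bit INDEX % HOST_BITS_PER_CHAR within byte
   INDEX / HOST_BITS_PER_CHAR of ARRAY.  */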
5045 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
5046 with the case values we have seen, assuming the case expression
5047 has the given TYPE.
5048 SPARSENESS is as determined by all_cases_count.
5050 The time needed is proportional to COUNT, unless
5051 SPARSENESS is 2, in which case quadratic time is needed. */
5053 void
5054 mark_seen_cases (type, cases_seen, count, sparseness)
5055 tree type;
5056 unsigned char *cases_seen;
5057 HOST_WIDE_INT count;
5058 int sparseness;
5060 tree next_node_to_try = NULL_TREE;
5061 HOST_WIDE_INT next_node_offset = 0;
5063 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
5064 tree val = make_node (INTEGER_CST);
5066 TREE_TYPE (val) = type;
5067 if (! root)
5068 /* Do nothing. */
5070 else if (sparseness == 2)
5072 tree t;
5073 unsigned HOST_WIDE_INT xlo;
5075 /* This less efficient loop is only needed to handle
5076 duplicate case values (multiple enum constants
5077 with the same value). */
5078 TREE_TYPE (val) = TREE_TYPE (root->low);
5079 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5080 t = TREE_CHAIN (t), xlo++)
5082 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
5083 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
5084 n = root;
5085 do
5087 /* Keep going past elements distinctly greater than VAL. */
5088 if (tree_int_cst_lt (val, n->low))
5089 n = n->left;
5091 /* or distinctly less than VAL. */
5092 else if (tree_int_cst_lt (n->high, val))
5093 n = n->right;
5095 else
5097 /* We have found a matching range. */
5098 BITARRAY_SET (cases_seen, xlo);
5099 break;
5102 while (n);
5105 else
5107 if (root->left)
5108 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5110 for (n = root; n; n = n->right)
5112 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5113 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5114 while (! tree_int_cst_lt (n->high, val))
5116 /* Calculate (into xlo) the "offset" of the integer (val).
5117 The element with lowest value has offset 0, the next smallest
5118 element has offset 1, etc. */
5120 unsigned HOST_WIDE_INT xlo;
5121 HOST_WIDE_INT xhi;
5122 tree t;
5124 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5126 /* The TYPE_VALUES will be in increasing order, so
5127 start searching where we last ended. */
5128 t = next_node_to_try;
5129 xlo = next_node_offset;
5130 xhi = 0;
5131 for (;;)
5133 if (t == NULL_TREE)
5135 t = TYPE_VALUES (type);
5136 xlo = 0;
5138 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5140 next_node_to_try = TREE_CHAIN (t);
5141 next_node_offset = xlo + 1;
5142 break;
5144 xlo++;
5145 t = TREE_CHAIN (t);
5146 if (t == next_node_to_try)
5148 xlo = -1;
5149 break;
5153 else
5155 t = TYPE_MIN_VALUE (type);
5156 if (t)
5157 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5158 &xlo, &xhi);
5159 else
5160 xlo = xhi = 0;
5161 add_double (xlo, xhi,
5162 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5163 &xlo, &xhi);
5166 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5167 BITARRAY_SET (cases_seen, xlo);
5169 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5170 1, 0,
5171 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5177 /* Called when the index of a switch statement is an enumerated type
5178 and there is no default label.
5180 Checks that all enumeration literals are covered by the case
5181 expressions of a switch. Also, warn if there are any extra
5182 switch cases that are *not* elements of the enumerated type.
5184 If all enumeration literals were covered by the case expressions,
5185 turn one of the expressions into the default expression since it should
5186 not be possible to fall through such a switch. */
5188 void
5189 check_for_full_enumeration_handling (type)
5190 tree type;
5192 struct case_node *n;
5193 tree chain;
5195 /* True iff the selector type is a numbered set mode. */
5196 int sparseness = 0;
5198 /* The number of possible selector values. */
5199 HOST_WIDE_INT size;
5201 /* For each possible selector value, a one iff it has been matched
5202 by a case value alternative. */
5203 unsigned char *cases_seen;
5205 /* The allocated size of cases_seen, in chars. */
5206 HOST_WIDE_INT bytes_needed;
5208 if (! warn_switch)
5209 return;
5211 size = all_cases_count (type, &sparseness);
5212 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5214 if (size > 0 && size < 600000
5215 /* We deliberately use calloc here, not cmalloc, so that we can suppress
5216 this optimization if we don't have enough memory rather than
5217 aborting, as xmalloc would do. */
5218 && (cases_seen =
5219 (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
5221 HOST_WIDE_INT i;
5222 tree v = TYPE_VALUES (type);
5224 /* The time complexity of this code is normally O(N), where
5225 N is the number of members in the enumerated type.
5226 However, if TYPE is an ENUMERAL_TYPE whose values do not
5227 increase monotonically, O(N*log(N)) time may be needed. */
5229 mark_seen_cases (type, cases_seen, size, sparseness);
5231 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5232 if (BITARRAY_TEST (cases_seen, i) == 0)
5233 warning ("enumeration value `%s' not handled in switch",
5234 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5236 free (cases_seen);
5239 /* Now we go the other way around; we warn if there are case
5240 expressions that don't correspond to enumerators. This can
5241 occur since C and C++ don't enforce type-checking of
5242 assignments to enumeration variables. */
5244 if (case_stack->data.case_stmt.case_list
5245 && case_stack->data.case_stmt.case_list->left)
5246 case_stack->data.case_stmt.case_list
5247 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5248 if (warn_switch)
5249 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5251 for (chain = TYPE_VALUES (type);
5252 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5253 chain = TREE_CHAIN (chain))
5256 if (!chain)
5258 if (TYPE_NAME (type) == 0)
5259 warning ("case value `%ld' not in enumerated type",
5260 (long) TREE_INT_CST_LOW (n->low));
5261 else
5262 warning ("case value `%ld' not in enumerated type `%s'",
5263 (long) TREE_INT_CST_LOW (n->low),
5264 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5265 == IDENTIFIER_NODE)
5266 ? TYPE_NAME (type)
5267 : DECL_NAME (TYPE_NAME (type))));
5269 if (!tree_int_cst_equal (n->low, n->high))
5271 for (chain = TYPE_VALUES (type);
5272 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5273 chain = TREE_CHAIN (chain))
5276 if (!chain)
5278 if (TYPE_NAME (type) == 0)
5279 warning ("case value `%ld' not in enumerated type",
5280 (long) TREE_INT_CST_LOW (n->high));
5281 else
5282 warning ("case value `%ld' not in enumerated type `%s'",
5283 (long) TREE_INT_CST_LOW (n->high),
5284 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5285 == IDENTIFIER_NODE)
5286 ? TYPE_NAME (type)
5287 : DECL_NAME (TYPE_NAME (type))));
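/* For example (illustrative, not part of this file), with -Wswitch:

     enum color { RED, GREEN, BLUE };
     switch (c) { case RED: ... case GREEN: ... }    (no default label)

   draws "enumeration value `BLUE' not handled in switch", while an
   extra `case 7:' would draw the "not in enumerated type" warning.  */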
5293 /* Free CN, and its children. */
5295 static void
5296 free_case_nodes (cn)
5297 case_node_ptr cn;
5299 if (cn)
5301 free_case_nodes (cn->left);
5302 free_case_nodes (cn->right);
5303 free (cn);
5309 /* Terminate a case (Pascal) or switch (C) statement
5310 in which ORIG_INDEX is the expression to be tested.
5311 Generate the code to test it and jump to the right place. */
5313 void
5314 expand_end_case (orig_index)
5315 tree orig_index;
5317 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5318 rtx default_label = 0;
5319 struct case_node *n;
5320 unsigned int count;
5321 rtx index;
5322 rtx table_label;
5323 int ncases;
5324 rtx *labelvec;
5325 int i;
5326 rtx before_case, end;
5327 struct nesting *thiscase = case_stack;
5328 tree index_expr, index_type;
5329 int unsignedp;
5331 /* Don't crash due to previous errors. */
5332 if (thiscase == NULL)
5333 return;
5335 table_label = gen_label_rtx ();
5336 index_expr = thiscase->data.case_stmt.index_expr;
5337 index_type = TREE_TYPE (index_expr);
5338 unsignedp = TREE_UNSIGNED (index_type);
5340 do_pending_stack_adjust ();
5342 /* This might get a spurious warning in the presence of a syntax error;
5343 it could be fixed by moving the call to check_seenlabel after the
5344 check for error_mark_node, and copying the code of check_seenlabel that
5345 deals with case_stack->data.case_stmt.line_number_status /
5346 restore_line_number_status in front of the call to end_cleanup_deferral;
5347 however, this might miss some useful warnings in the presence of
5348 non-syntax errors. */
5349 check_seenlabel ();
5351 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5352 if (index_type != error_mark_node)
5354 /* If switch expression was an enumerated type, check that all
5355 enumeration literals are covered by the cases.
5356 No sense trying this if there's a default case, however. */
5358 if (!thiscase->data.case_stmt.default_label
5359 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5360 && TREE_CODE (index_expr) != INTEGER_CST)
5361 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5363 /* If we don't have a default-label, create one here,
5364 after the body of the switch. */
5365 if (thiscase->data.case_stmt.default_label == 0)
5367 thiscase->data.case_stmt.default_label
5368 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5369 expand_label (thiscase->data.case_stmt.default_label);
5371 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5373 before_case = get_last_insn ();
5375 if (thiscase->data.case_stmt.case_list
5376 && thiscase->data.case_stmt.case_list->left)
5377 thiscase->data.case_stmt.case_list
5378 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5380 /* Simplify the case-list before we count it. */
5381 group_case_nodes (thiscase->data.case_stmt.case_list);
5383 /* Get upper and lower bounds of case values.
5384 Also convert all the case values to the index expr's data type. */
5386 count = 0;
5387 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5389 /* Check low and high label values are integers. */
5390 if (TREE_CODE (n->low) != INTEGER_CST)
5391 abort ();
5392 if (TREE_CODE (n->high) != INTEGER_CST)
5393 abort ();
5395 n->low = convert (index_type, n->low);
5396 n->high = convert (index_type, n->high);
5398 /* Count the elements and track the largest and smallest
5399 of them (treating them as signed even if they are not). */
5400 if (count++ == 0)
5402 minval = n->low;
5403 maxval = n->high;
5405 else
5407 if (INT_CST_LT (n->low, minval))
5408 minval = n->low;
5409 if (INT_CST_LT (maxval, n->high))
5410 maxval = n->high;
5412 /* A range counts double, since it requires two compares. */
5413 if (! tree_int_cst_equal (n->low, n->high))
5414 count++;
5417 /* Compute span of values. */
5418 if (count != 0)
5419 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5421 end_cleanup_deferral ();
5423 if (count == 0)
5425 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5426 emit_queue ();
5427 emit_jump (default_label);
5430 /* If range of values is much bigger than number of values,
5431 make a sequence of conditional branches instead of a dispatch.
5432 If the switch-index is a constant, do it this way
5433 because we can optimize it. */
5435 else if (count < case_values_threshold ()
5436 || compare_tree_int (range, 10 * count) > 0
5437 /* RANGE may be signed, and really large ranges will show up
5438 as negative numbers. */
5439 || compare_tree_int (range, 0) < 0
5440 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5441 || flag_pic
5442 #endif
5443 || TREE_CODE (index_expr) == INTEGER_CST
5444 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5445 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5447 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5449 /* If the index is a short or char for which we do not have
5450 an insn to handle comparisons directly, convert it to
5451 a full integer now, rather than letting each comparison
5452 generate the conversion. */
5454 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5455 && ! have_insn_for (COMPARE, GET_MODE (index)))
5457 enum machine_mode wider_mode;
5458 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5459 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5460 if (have_insn_for (COMPARE, wider_mode))
5462 index = convert_to_mode (wider_mode, index, unsignedp);
5463 break;
5467 emit_queue ();
5468 do_pending_stack_adjust ();
5470 index = protect_from_queue (index, 0);
5471 if (GET_CODE (index) == MEM)
5472 index = copy_to_reg (index);
5473 if (GET_CODE (index) == CONST_INT
5474 || TREE_CODE (index_expr) == INTEGER_CST)
5476 /* Make a tree node with the proper constant value
5477 if we don't already have one. */
5478 if (TREE_CODE (index_expr) != INTEGER_CST)
5480 index_expr
5481 = build_int_2 (INTVAL (index),
5482 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5483 index_expr = convert (index_type, index_expr);
5486 /* For constant index expressions we need only
5487 issue an unconditional branch to the appropriate
5488 target code. The job of removing any unreachable
5489 code is left to the optimization phase if the
5490 "-O" option is specified. */
5491 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5492 if (! tree_int_cst_lt (index_expr, n->low)
5493 && ! tree_int_cst_lt (n->high, index_expr))
5494 break;
5496 if (n)
5497 emit_jump (label_rtx (n->code_label));
5498 else
5499 emit_jump (default_label);
5501 else
5503 /* If the index expression is not constant we generate
5504 a binary decision tree to select the appropriate
5505 target code. This is done as follows:
5507 The list of cases is rearranged into a binary tree,
5508 nearly optimal assuming equal probability for each case.
5510 The tree is transformed into RTL, eliminating
5511 redundant test conditions at the same time.
5513 If program flow could reach the end of the
5514 decision tree an unconditional jump to the
5515 default code is emitted. */
5517 use_cost_table
5518 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5519 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5520 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5521 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5522 default_label, index_type);
5523 emit_jump_if_reachable (default_label);
5526 else
5528 if (! try_casesi (index_type, index_expr, minval, range,
5529 table_label, default_label))
5531 index_type = thiscase->data.case_stmt.nominal_type;
5533 /* Index jumptables from zero for suitable values of
5534 minval to avoid a subtraction. */
5535 if (! optimize_size
5536 && compare_tree_int (minval, 0) > 0
5537 && compare_tree_int (minval, 3) < 0)
5539 minval = integer_zero_node;
5540 range = maxval;
5543 if (! try_tablejump (index_type, index_expr, minval, range,
5544 table_label, default_label))
5545 abort ();
5548 /* Get table of labels to jump to, in order of case index. */
5550 ncases = tree_low_cst (range, 0) + 1;
5551 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5552 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5554 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5556 HOST_WIDE_INT i
5557 = tree_low_cst (n->low, 0) - tree_low_cst (minval, 0);
5559 while (1)
5561 labelvec[i]
5562 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5563 if (i + tree_low_cst (minval, 0)
5564 == tree_low_cst (n->high, 0))
5565 break;
5566 i++;
5570 /* Fill in the gaps with the default. */
5571 for (i = 0; i < ncases; i++)
5572 if (labelvec[i] == 0)
5573 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5575 /* Output the table. */
5576 emit_label (table_label);
5578 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5579 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5580 gen_rtx_LABEL_REF (Pmode, table_label),
5581 gen_rtvec_v (ncases, labelvec),
5582 const0_rtx, const0_rtx));
5583 else
5584 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5585 gen_rtvec_v (ncases, labelvec)));
5587 /* If the case insn drops through the table,
5588 after the table we must jump to the default-label.
5589 Otherwise record no drop-through after the table. */
5590 #ifdef CASE_DROPS_THROUGH
5591 emit_jump (default_label);
5592 #else
5593 emit_barrier ();
5594 #endif
5597 before_case = NEXT_INSN (before_case);
5598 end = get_last_insn ();
5599 if (squeeze_notes (&before_case, &end))
5600 abort ();
5601 reorder_insns (before_case, end,
5602 thiscase->data.case_stmt.start);
5604 else
5605 end_cleanup_deferral ();
5607 if (thiscase->exit_label)
5608 emit_label (thiscase->exit_label);
5610 free_case_nodes (case_stack->data.case_stmt.case_list);
5611 POPSTACK (case_stack);
5613 free_temp_slots ();
5616 /* Convert the tree NODE into a list linked by the right field, with the left
5617 field zeroed. RIGHT is used for recursion; it is a list to be placed
5618 rightmost in the resulting list. */
5620 static struct case_node *
5621 case_tree2list (node, right)
5622 struct case_node *node, *right;
5624 struct case_node *left;
5626 if (node->right)
5627 right = case_tree2list (node->right, right);
5629 node->right = right;
5630 if ((left = node->left))
5632 node->left = 0;
5633 return case_tree2list (left, node);
5636 return node;
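/* An illustrative sketch, not compiled (#if 0): the flattening above
   on a made-up three-node type.  A tree with root B, left child A and
   right child C yields the list A -> B -> C, since each left subtree
   is spliced in front of its parent and each right subtree behind it.  */
#if 0
struct demo_node { struct demo_node *left, *right; char value; };

/* Same algorithm as case_tree2list, on the toy type.  */
static struct demo_node *
demo_tree2list (node, right)
     struct demo_node *node, *right;
{
  struct demo_node *left;

  if (node->right)
    right = demo_tree2list (node->right, right);
  node->right = right;
  if ((left = node->left))
    {
      node->left = 0;
      return demo_tree2list (left, node);
    }
  return node;
}

/* Build B(A, C), flatten it, and check the order is A, B, C.  */
static int
demo_flatten_order ()
{
  static struct demo_node a = { 0, 0, 'A' };
  static struct demo_node c = { 0, 0, 'C' };
  static struct demo_node b = { &a, &c, 'B' };
  struct demo_node *head = demo_tree2list (&b, 0);

  return (head->value == 'A'
          && head->right->value == 'B'
          && head->right->right->value == 'C');
}
#endif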
5639 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5641 static void
5642 do_jump_if_equal (op1, op2, label, unsignedp)
5643 rtx op1, op2, label;
5644 int unsignedp;
5646 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5648 if (INTVAL (op1) == INTVAL (op2))
5649 emit_jump (label);
5651 else
5652 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5653 (GET_MODE (op1) == VOIDmode
5654 ? GET_MODE (op2) : GET_MODE (op1)),
5655 unsignedp, label);
5658 /* Not all case values are encountered equally. This function
5659 uses a heuristic to weight case labels, in cases where that
5660 looks like a reasonable thing to do.
5662 Right now, all we try to guess is text, and we establish the
5663 following weights:
5665 chars above space: 16
5666 digits: 16
5667 default: 12
5668 space, punct: 8
5669 tab: 4
5670 newline: 2
5671 other "\" chars: 1
5672 remaining chars: 0
5674 If we find any cases in the switch that are not either -1 or in the range
5675 of valid ASCII characters, or are control characters other than those
5676 commonly used with "\", don't treat this switch as scanning text.
5678 Return 1 if these nodes are suitable for cost estimation, otherwise
5679 return 0. */
5681 static int
5682 estimate_case_costs (node)
5683 case_node_ptr node;
5685 tree min_ascii = integer_minus_one_node;
5686 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5687 case_node_ptr n;
5688 int i;
5690 /* If we haven't already made the cost table, make it now. Note that the
5691 lower bound of the table is -1, not zero. */
5693 if (! cost_table_initialized)
5695 cost_table_initialized = 1;
5697 for (i = 0; i < 128; i++)
5699 if (ISALNUM (i))
5700 COST_TABLE (i) = 16;
5701 else if (ISPUNCT (i))
5702 COST_TABLE (i) = 8;
5703 else if (ISCNTRL (i))
5704 COST_TABLE (i) = -1;
5707 COST_TABLE (' ') = 8;
5708 COST_TABLE ('\t') = 4;
5709 COST_TABLE ('\0') = 4;
5710 COST_TABLE ('\n') = 2;
5711 COST_TABLE ('\f') = 1;
5712 COST_TABLE ('\v') = 1;
5713 COST_TABLE ('\b') = 1;
5716 /* See if all the case expressions look like text. It is text if the
5717 lowest constant is >= -1 and the highest constant is <= 127. Do all comparisons
5718 as signed arithmetic since we don't want to ever access cost_table with a
5719 value less than -1. Also check that none of the constants in a range
5720 are strange control characters. */
5722 for (n = node; n; n = n->right)
5724 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5725 return 0;
5727 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5728 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5729 if (COST_TABLE (i) < 0)
5730 return 0;
5733 /* All interesting values are within the range of interesting
5734 ASCII characters. */
5735 return 1;
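/* An illustrative sketch, not compiled (#if 0): the weighting scheme
   above on a toy scale.  The <ctype.h> classifiers stand in for the
   ISALNUM/ISPUNCT/ISCNTRL wrappers used in this file, and all demo_*
   names are invented.  A switch over 'a', 'e', 'i' scores 16 per
   label and is costed as text; one containing the control character
   0x01 hits a -1 entry and disables the heuristic.  */
#if 0
#include <ctype.h>

static int demo_costs[129];	/* Entry [c + 1] holds the weight of C,
				   so index -1 is representable.  */
#define DEMO_COST(c) demo_costs[(c) + 1]

static void
demo_init_costs ()
{
  int i;

  for (i = 0; i < 128; i++)
    if (isalnum (i))
      DEMO_COST (i) = 16;
    else if (ispunct (i))
      DEMO_COST (i) = 8;
    else if (iscntrl (i))
      DEMO_COST (i) = -1;	/* Strange control character.  */

  DEMO_COST (' ') = 8;
  DEMO_COST ('\t') = 4;
  DEMO_COST ('\n') = 2;
}

/* Return 0 if any value in [LOW, HIGH] disqualifies text costing.  */
static int
demo_range_is_text (low, high)
     int low, high;
{
  int i;

  if (low < -1 || high > 127)
    return 0;
  for (i = low; i <= high; i++)
    if (DEMO_COST (i) < 0)
      return 0;
  return 1;
}
#endif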
5738 /* Scan an ordered list of case nodes
5739 combining those with consecutive values or ranges.
5741 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
5743 static void
5744 group_case_nodes (head)
5745 case_node_ptr head;
5747 case_node_ptr node = head;
5749 while (node)
5751 rtx lb = next_real_insn (label_rtx (node->code_label));
5752 rtx lb2;
5753 case_node_ptr np = node;
5755 /* Try to group the successors of NODE with NODE. */
5756 while (((np = np->right) != 0)
5757 /* Do they jump to the same place? */
5758 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5759 || (lb != 0 && lb2 != 0
5760 && simplejump_p (lb)
5761 && simplejump_p (lb2)
5762 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5763 SET_SRC (PATTERN (lb2)))))
5764 /* Are their ranges consecutive? */
5765 && tree_int_cst_equal (np->low,
5766 fold (build (PLUS_EXPR,
5767 TREE_TYPE (node->high),
5768 node->high,
5769 integer_one_node)))
5770 /* An overflow is not consecutive. */
5771 && tree_int_cst_lt (node->high,
5772 fold (build (PLUS_EXPR,
5773 TREE_TYPE (node->high),
5774 node->high,
5775 integer_one_node))))
5777 node->high = np->high;
5779 /* NP is the first node after NODE which can't be grouped with it.
5780 Delete the nodes in between, and move on to that node. */
5781 node->right = np;
5782 node = np;
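/* An illustrative sketch, not compiled (#if 0): the same grouping on
   a made-up list of integer ranges.  Entries 1, 2 and 3 that share a
   target collapse into the single range 1..3.  The INT_MAX test plays
   the role of the overflow check above: at the top of the type,
   HIGH + 1 would wrap around and falsely look consecutive.  */
#if 0
#include <limits.h>

struct demo_range { int low, high, target; struct demo_range *right; };

static void
demo_group (head)
     struct demo_range *head;
{
  struct demo_range *node, *np;

  for (node = head; node; node = np)
    {
      /* Absorb successors that jump to the same target and whose
	 range starts right after ours; skipped nodes are dropped
	 when node->right is relinked below.  */
      for (np = node->right;
	   np
	   && np->target == node->target
	   && node->high != INT_MAX
	   && np->low == node->high + 1;
	   np = np->right)
	node->high = np->high;
      node->right = np;
    }
}
#endif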
5786 /* Take an ordered list of case nodes
5787 and transform them into a near optimal binary tree,
5788 on the assumption that any target code selection value is as
5789 likely as any other.
5791 The transformation is performed by splitting the ordered
5792 list into two equal sections plus a pivot. The parts are
5793 then attached to the pivot as left and right branches. Each
5794 branch is then transformed recursively. */
5796 static void
5797 balance_case_nodes (head, parent)
5798 case_node_ptr *head;
5799 case_node_ptr parent;
5801 case_node_ptr np;
5803 np = *head;
5804 if (np)
5806 int cost = 0;
5807 int i = 0;
5808 int ranges = 0;
5809 case_node_ptr *npp;
5810 case_node_ptr left;
5812 /* Count the number of entries on the branch. Also count the ranges. */
5814 while (np)
5816 if (!tree_int_cst_equal (np->low, np->high))
5818 ranges++;
5819 if (use_cost_table)
5820 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5823 if (use_cost_table)
5824 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5826 i++;
5827 np = np->right;
5830 if (i > 2)
5832 /* Split this list if it is long enough for that to help. */
5833 npp = head;
5834 left = *npp;
5835 if (use_cost_table)
5837 /* Find the place in the list that bisects the list's total cost.
5838 Here `i' gets half the total cost. */
5839 int n_moved = 0;
5840 i = (cost + 1) / 2;
5841 while (1)
5843 /* Skip nodes while their cost does not reach that amount. */
5844 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5845 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5846 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5847 if (i <= 0)
5848 break;
5849 npp = &(*npp)->right;
5850 n_moved += 1;
5852 if (n_moved == 0)
5854 /* Leave this branch lopsided, but optimize left-hand
5855 side and fill in `parent' fields for right-hand side. */
5856 np = *head;
5857 np->parent = parent;
5858 balance_case_nodes (&np->left, np);
5859 for (; np->right; np = np->right)
5860 np->right->parent = np;
5861 return;
5864 /* If there are just three nodes, split at the middle one. */
5865 else if (i == 3)
5866 npp = &(*npp)->right;
5867 else
5869 /* Find the place in the list that bisects the list's total cost,
5870 where ranges count as 2.
5871 Here `i' gets half the total cost. */
5872 i = (i + ranges + 1) / 2;
5873 while (1)
5875 /* Skip nodes while their cost does not reach that amount. */
5876 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5877 i--;
5878 i--;
5879 if (i <= 0)
5880 break;
5881 npp = &(*npp)->right;
5884 *head = np = *npp;
5885 *npp = 0;
5886 np->parent = parent;
5887 np->left = left;
5889 /* Optimize each of the two split parts. */
5890 balance_case_nodes (&np->left, np);
5891 balance_case_nodes (&np->right, np);
5893 else
5895 /* Else leave this branch as one level,
5896 but fill in `parent' fields. */
5897 np = *head;
5898 np->parent = parent;
5899 for (; np->right; np = np->right)
5900 np->right->parent = np;
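/* An illustrative sketch, not compiled (#if 0): what the balancing
   above computes when no cost table is in use, i.e. when every case
   value is equally likely.  A sorted array of case values is split at
   its midpoint and each half becomes a subtree, so the emitted
   compare-and-branch sequence probes values in binary-search order.
   POOL is a caller-supplied array with one slot per value; all names
   are invented.  */
#if 0
struct demo_tree { int value; struct demo_tree *left, *right; };

static struct demo_tree *
demo_balance (vals, lo, hi, pool)
     int *vals;
     int lo, hi;
     struct demo_tree *pool;
{
  int mid;
  struct demo_tree *np;

  if (lo > hi)
    return 0;
  mid = lo + (hi - lo) / 2;	/* Pivot at the midpoint.  */
  np = &pool[mid];
  np->value = vals[mid];
  np->left = demo_balance (vals, lo, mid - 1, pool);
  np->right = demo_balance (vals, mid + 1, hi, pool);
  return np;
}
#endif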
5905 /* Search the parent sections of the case node tree
5906 to see if a test for the lower bound of NODE would be redundant.
5907 INDEX_TYPE is the type of the index expression.
5909 The instructions to generate the case decision tree are
5910 output in the same order as nodes are processed so it is
5911 known that if a parent node tests the current node's low
5912 value minus one, then the current node is bounded at its lower
5913 end. Thus the test would be redundant. */
5915 static int
5916 node_has_low_bound (node, index_type)
5917 case_node_ptr node;
5918 tree index_type;
5920 tree low_minus_one;
5921 case_node_ptr pnode;
5923 /* If the lower bound of this node is the lowest value in the index type,
5924 we need not test it. */
5926 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5927 return 1;
5929 /* If this node has a left branch, the value at the left must be less
5930 than that at this node, so it cannot be bounded at the bottom and
5931 we need not bother testing any further. */
5933 if (node->left)
5934 return 0;
5936 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5937 node->low, integer_one_node));
5939 /* If the subtraction above overflowed, we can't verify anything.
5940 Otherwise, look for a parent that tests our value - 1. */
5942 if (! tree_int_cst_lt (low_minus_one, node->low))
5943 return 0;
5945 for (pnode = node->parent; pnode; pnode = pnode->parent)
5946 if (tree_int_cst_equal (low_minus_one, pnode->high))
5947 return 1;
5949 return 0;
5952 /* Search the parent sections of the case node tree
5953 to see if a test for the upper bound of NODE would be redundant.
5954 INDEX_TYPE is the type of the index expression.
5956 The instructions to generate the case decision tree are
5957 output in the same order as nodes are processed so it is
5958 known that if a parent node tests the current node's high
5959 value plus one, then the current node is bounded at its upper
5960 end. Thus the test would be redundant. */
5962 static int
5963 node_has_high_bound (node, index_type)
5964 case_node_ptr node;
5965 tree index_type;
5967 tree high_plus_one;
5968 case_node_ptr pnode;
5970 /* If there is no upper bound, obviously no test is needed. */
5972 if (TYPE_MAX_VALUE (index_type) == NULL)
5973 return 1;
5975 /* If the upper bound of this node is the highest value in the type
5976 of the index expression, we need not test against it. */
5978 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5979 return 1;
5981 /* If this node has a right branch, the value at the right must be greater
5982 than that at this node, so it cannot be bounded at the top and
5983 we need not bother testing any further. */
5985 if (node->right)
5986 return 0;
5988 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5989 node->high, integer_one_node));
5991 /* If the addition above overflowed, we can't verify anything.
5992 Otherwise, look for a parent that tests our value + 1. */
5994 if (! tree_int_cst_lt (node->high, high_plus_one))
5995 return 0;
5997 for (pnode = node->parent; pnode; pnode = pnode->parent)
5998 if (tree_int_cst_equal (high_plus_one, pnode->low))
5999 return 1;
6001 return 0;
6004 /* Search the parent sections of the
6005 case node tree to see if both tests for the upper and lower
6006 bounds of NODE would be redundant. */
6008 static int
6009 node_is_bounded (node, index_type)
6010 case_node_ptr node;
6011 tree index_type;
6013 return (node_has_low_bound (node, index_type)
6014 && node_has_high_bound (node, index_type));
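/* An illustrative sketch, not compiled (#if 0): the parent walk
   behind node_has_low_bound, on a made-up node type with int bounds.
   If some ancestor's high bound is exactly our low value minus one,
   every smaller index value was already diverted before control can
   reach this node, so testing the low bound again would be redundant.
   node_has_high_bound is the mirror image with + 1.  */
#if 0
#include <limits.h>

struct demo_case { int low, high; struct demo_case *parent, *left; };

static int
demo_has_low_bound (node)
     struct demo_case *node;
{
  struct demo_case *p;

  if (node->low == INT_MIN)
    return 1;			/* Nothing exists below the type minimum.  */
  if (node->left)
    return 0;			/* Smaller values still reach our subtree.  */
  for (p = node->parent; p; p = p->parent)
    if (p->high == node->low - 1)
      return 1;			/* An ancestor ruled out all lower values.  */
  return 0;
}
#endif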
6017 /* Emit an unconditional jump to LABEL unless it would be dead code. */
6019 static void
6020 emit_jump_if_reachable (label)
6021 rtx label;
6023 if (GET_CODE (get_last_insn ()) != BARRIER)
6024 emit_jump (label);
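/* An illustrative sketch, not compiled (#if 0): the shape of the code
   emit_case_nodes below produces for a balanced tree over the cases
   1, 3 and 5, with 3 as the pivot.  Each single-valued node first
   tests equality, then the pivot comparison dispatches into one
   subtree; comparisons a parent has already decided are pruned.  */
#if 0
static int
demo_decision_tree (index)
     int index;
{
  if (index == 3)
    goto case_3;		/* Pivot node.  */
  if (index > 3)
    {
      if (index == 5)
	goto case_5;		/* Right subtree: no need to test
				   index > 1 here, the parent did.  */
      goto default_case;
    }
  if (index == 1)
    goto case_1;		/* Left subtree.  */

 default_case: return 0;
 case_1: return 1;
 case_3: return 3;
 case_5: return 5;
}
#endif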
6027 /* Emit step-by-step code to select a case for the value of INDEX.
6028 The decision tree so generated follows the form of the
6029 case-node binary tree NODE, whose nodes represent test conditions.
6030 INDEX_TYPE is the type of the index of the switch.
6032 Care is taken to prune redundant tests from the decision tree
6033 by detecting any boundary conditions already checked by
6034 emitted rtx. (See node_has_high_bound, node_has_low_bound
6035 and node_is_bounded, above.)
6037 Where the test conditions can be shown to be redundant we emit
6038 an unconditional jump to the target code. As a further
6039 optimization, the subordinates of a tree node are examined to
6040 check for bounded nodes. In this case conditional and/or
6041 unconditional jumps resulting from the boundary check for the
6042 current node are arranged to target the subordinate's associated
6043 code directly for out-of-bound conditions on the current node.
6045 We can assume that when control reaches the code generated here,
6046 the index value has already been compared with the parents
6047 of this node, and determined to be on the same side of each parent
6048 as this node is. Thus, if this node tests for the value 51,
6049 and a parent tested for 52, we don't need to consider
6050 the possibility of a value greater than 51. If another parent
6051 tests for the value 50, then this node need not test anything. */
6053 static void
6054 emit_case_nodes (index, node, default_label, index_type)
6055 rtx index;
6056 case_node_ptr node;
6057 rtx default_label;
6058 tree index_type;
6060 /* If INDEX has an unsigned type, we must make unsigned branches. */
6061 int unsignedp = TREE_UNSIGNED (index_type);
6062 enum machine_mode mode = GET_MODE (index);
6063 enum machine_mode imode = TYPE_MODE (index_type);
6065 /* See if our parents have already tested everything for us.
6066 If they have, emit an unconditional jump for this node. */
6067 if (node_is_bounded (node, index_type))
6068 emit_jump (label_rtx (node->code_label));
6070 else if (tree_int_cst_equal (node->low, node->high))
6072 /* Node is single valued. First see if the index expression matches
6073 this node and then check our children, if any. */
6075 do_jump_if_equal (index,
6076 convert_modes (mode, imode,
6077 expand_expr (node->low, NULL_RTX,
6078 VOIDmode, 0),
6079 unsignedp),
6080 label_rtx (node->code_label), unsignedp);
6082 if (node->right != 0 && node->left != 0)
6084 /* This node has children on both sides.
6085 Dispatch to one side or the other
6086 by comparing the index value with this node's value.
6087 If one subtree is bounded, check that one first,
6088 so we can avoid real branches in the tree. */
6090 if (node_is_bounded (node->right, index_type))
6092 emit_cmp_and_jump_insns (index,
6093 convert_modes
6094 (mode, imode,
6095 expand_expr (node->high, NULL_RTX,
6096 VOIDmode, 0),
6097 unsignedp),
6098 GT, NULL_RTX, mode, unsignedp,
6099 label_rtx (node->right->code_label));
6100 emit_case_nodes (index, node->left, default_label, index_type);
6103 else if (node_is_bounded (node->left, index_type))
6105 emit_cmp_and_jump_insns (index,
6106 convert_modes
6107 (mode, imode,
6108 expand_expr (node->high, NULL_RTX,
6109 VOIDmode, 0),
6110 unsignedp),
6111 LT, NULL_RTX, mode, unsignedp,
6112 label_rtx (node->left->code_label));
6113 emit_case_nodes (index, node->right, default_label, index_type);
6116 else
6118 /* Neither node is bounded. First distinguish the two sides;
6119 then emit the code for one side at a time. */
6121 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6123 /* See if the value is on the right. */
6124 emit_cmp_and_jump_insns (index,
6125 convert_modes
6126 (mode, imode,
6127 expand_expr (node->high, NULL_RTX,
6128 VOIDmode, 0),
6129 unsignedp),
6130 GT, NULL_RTX, mode, unsignedp,
6131 label_rtx (test_label));
6133 /* Value must be on the left.
6134 Handle the left-hand subtree. */
6135 emit_case_nodes (index, node->left, default_label, index_type);
6136 /* If left-hand subtree does nothing,
6137 go to default. */
6138 emit_jump_if_reachable (default_label);
6140 /* Code branches here for the right-hand subtree. */
6141 expand_label (test_label);
6142 emit_case_nodes (index, node->right, default_label, index_type);
6146 else if (node->right != 0 && node->left == 0)
6148 /* Here we have a right child but no left, so we issue a conditional
6149 branch to default and process the right child.
6151 Omit the conditional branch to default if the right child is a
6152 single-valued leaf; it costs too much space to save so little time. */
6154 if (node->right->right || node->right->left
6155 || !tree_int_cst_equal (node->right->low, node->right->high))
6157 if (!node_has_low_bound (node, index_type))
6159 emit_cmp_and_jump_insns (index,
6160 convert_modes
6161 (mode, imode,
6162 expand_expr (node->high, NULL_RTX,
6163 VOIDmode, 0),
6164 unsignedp),
6165 LT, NULL_RTX, mode, unsignedp,
6166 default_label);
6169 emit_case_nodes (index, node->right, default_label, index_type);
6171 else
6172 /* We cannot process node->right normally
6173 since we haven't ruled out the numbers less than
6174 this node's value. So handle node->right explicitly. */
6175 do_jump_if_equal (index,
6176 convert_modes
6177 (mode, imode,
6178 expand_expr (node->right->low, NULL_RTX,
6179 VOIDmode, 0),
6180 unsignedp),
6181 label_rtx (node->right->code_label), unsignedp);
6184 else if (node->right == 0 && node->left != 0)
6186 /* Just one subtree, on the left. */
6187 if (node->left->left || node->left->right
6188 || !tree_int_cst_equal (node->left->low, node->left->high))
6190 if (!node_has_high_bound (node, index_type))
6192 emit_cmp_and_jump_insns (index,
6193 convert_modes
6194 (mode, imode,
6195 expand_expr (node->high, NULL_RTX,
6196 VOIDmode, 0),
6197 unsignedp),
6198 GT, NULL_RTX, mode, unsignedp,
6199 default_label);
6202 emit_case_nodes (index, node->left, default_label, index_type);
6204 else
6205 /* We cannot process node->left normally
6206 since we haven't ruled out the numbers greater than
6207 this node's value. So handle node->left explicitly. */
6208 do_jump_if_equal (index,
6209 convert_modes
6210 (mode, imode,
6211 expand_expr (node->left->low, NULL_RTX,
6212 VOIDmode, 0),
6213 unsignedp),
6214 label_rtx (node->left->code_label), unsignedp);
6217 else
6219 /* Node is a range. These cases are very similar to those for a single
6220 value, except that we do not start by testing whether this node
6221 is the one to branch to. */
6223 if (node->right != 0 && node->left != 0)
6225 /* Node has subtrees on both sides.
6226 If the right-hand subtree is bounded,
6227 test for it first, since we can go straight there.
6228 Otherwise, we need to make a branch in the control structure,
6229 then handle the two subtrees. */
6230 tree test_label = 0;
6232 if (node_is_bounded (node->right, index_type))
6233 /* Right hand node is fully bounded so we can eliminate any
6234 testing and branch directly to the target code. */
6235 emit_cmp_and_jump_insns (index,
6236 convert_modes
6237 (mode, imode,
6238 expand_expr (node->high, NULL_RTX,
6239 VOIDmode, 0),
6240 unsignedp),
6241 GT, NULL_RTX, mode, unsignedp,
6242 label_rtx (node->right->code_label));
6243 else
6245 /* Right hand node requires testing.
6246 Branch to a label where we will handle it later. */
6248 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6249 emit_cmp_and_jump_insns (index,
6250 convert_modes
6251 (mode, imode,
6252 expand_expr (node->high, NULL_RTX,
6253 VOIDmode, 0),
6254 unsignedp),
6255 GT, NULL_RTX, mode, unsignedp,
6256 label_rtx (test_label));
6259 /* Value belongs to this node or to the left-hand subtree. */
6261 emit_cmp_and_jump_insns (index,
6262 convert_modes
6263 (mode, imode,
6264 expand_expr (node->low, NULL_RTX,
6265 VOIDmode, 0),
6266 unsignedp),
6267 GE, NULL_RTX, mode, unsignedp,
6268 label_rtx (node->code_label));
6270 /* Handle the left-hand subtree. */
6271 emit_case_nodes (index, node->left, default_label, index_type);
6273 /* If right node had to be handled later, do that now. */
6275 if (test_label)
6277 /* If the left-hand subtree fell through,
6278 don't let it fall into the right-hand subtree. */
6279 emit_jump_if_reachable (default_label);
6281 expand_label (test_label);
6282 emit_case_nodes (index, node->right, default_label, index_type);
6286 else if (node->right != 0 && node->left == 0)
6288 /* Deal with values to the left of this node,
6289 if they are possible. */
6290 if (!node_has_low_bound (node, index_type))
6292 emit_cmp_and_jump_insns (index,
6293 convert_modes
6294 (mode, imode,
6295 expand_expr (node->low, NULL_RTX,
6296 VOIDmode, 0),
6297 unsignedp),
6298 LT, NULL_RTX, mode, unsignedp,
6299 default_label);
6302 /* Value belongs to this node or to the right-hand subtree. */
6304 emit_cmp_and_jump_insns (index,
6305 convert_modes
6306 (mode, imode,
6307 expand_expr (node->high, NULL_RTX,
6308 VOIDmode, 0),
6309 unsignedp),
6310 LE, NULL_RTX, mode, unsignedp,
6311 label_rtx (node->code_label));
6313 emit_case_nodes (index, node->right, default_label, index_type);
6316 else if (node->right == 0 && node->left != 0)
6318 /* Deal with values to the right of this node,
6319 if they are possible. */
6320 if (!node_has_high_bound (node, index_type))
6322 emit_cmp_and_jump_insns (index,
6323 convert_modes
6324 (mode, imode,
6325 expand_expr (node->high, NULL_RTX,
6326 VOIDmode, 0),
6327 unsignedp),
6328 GT, NULL_RTX, mode, unsignedp,
6329 default_label);
6332 /* Value belongs to this node or to the left-hand subtree. */
6334 emit_cmp_and_jump_insns (index,
6335 convert_modes
6336 (mode, imode,
6337 expand_expr (node->low, NULL_RTX,
6338 VOIDmode, 0),
6339 unsignedp),
6340 GE, NULL_RTX, mode, unsignedp,
6341 label_rtx (node->code_label));
6343 emit_case_nodes (index, node->left, default_label, index_type);
6346 else
6348 /* Node has no children so we check low and high bounds to remove
6349 redundant tests. At most one of the bounds can hold,
6350 since otherwise this node would be bounded--a case tested already.
6351 int high_bound = node_has_high_bound (node, index_type);
6352 int low_bound = node_has_low_bound (node, index_type);
6354 if (!high_bound && low_bound)
6356 emit_cmp_and_jump_insns (index,
6357 convert_modes
6358 (mode, imode,
6359 expand_expr (node->high, NULL_RTX,
6360 VOIDmode, 0),
6361 unsignedp),
6362 GT, NULL_RTX, mode, unsignedp,
6363 default_label);
6366 else if (!low_bound && high_bound)
6368 emit_cmp_and_jump_insns (index,
6369 convert_modes
6370 (mode, imode,
6371 expand_expr (node->low, NULL_RTX,
6372 VOIDmode, 0),
6373 unsignedp),
6374 LT, NULL_RTX, mode, unsignedp,
6375 default_label);
6377 else if (!low_bound && !high_bound)
6379 /* Widen LOW and HIGH to the same width as INDEX. */
6380 tree type = type_for_mode (mode, unsignedp);
6381 tree low = build1 (CONVERT_EXPR, type, node->low);
6382 tree high = build1 (CONVERT_EXPR, type, node->high);
6383 rtx low_rtx, new_index, new_bound;
6385 /* Instead of doing two branches, emit one unsigned branch for
6386 (index-low) > (high-low). */
6387 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6388 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6389 NULL_RTX, unsignedp,
6390 OPTAB_WIDEN);
6391 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6392 high, low)),
6393 NULL_RTX, mode, 0);
6395 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6396 mode, 1, default_label);
6399 emit_jump (label_rtx (node->code_label));
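/* An illustrative sketch, not compiled (#if 0): the single-comparison
   range test emitted just above.  To decide LOW <= INDEX <= HIGH,
   compute INDEX - LOW and compare it with HIGH - LOW as unsigned
   numbers: an INDEX below LOW wraps around to a huge unsigned value
   and fails the same test, so one branch replaces two.  */
#if 0
static int
demo_in_range (index, low, high)
     int index, low, high;
{
  unsigned int offset = (unsigned int) index - (unsigned int) low;
  unsigned int span = (unsigned int) high - (unsigned int) low;

  return offset <= span;
}
#endif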