1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
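/* For illustration only (a hypothetical front-end sketch, not code
   from any real parser), `if (cond) stmt' might be expanded as:

	expand_start_cond (cond, 0);	-- emit the test, note the labels
	  (expand STMT here)
	expand_end_cond ();		-- define the join label

   Loops follow the same pattern with expand_start_loop and
   expand_end_loop.  */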
36 #include "config.h"
37 #include "system.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "tm_p.h"
42 #include "flags.h"
43 #include "except.h"
44 #include "function.h"
45 #include "insn-config.h"
46 #include "expr.h"
47 #include "libfuncs.h"
48 #include "hard-reg-set.h"
49 #include "obstack.h"
50 #include "loop.h"
51 #include "recog.h"
52 #include "machmode.h"
53 #include "toplev.h"
54 #include "output.h"
55 #include "ggc.h"
57 #define obstack_chunk_alloc xmalloc
58 #define obstack_chunk_free free
59 struct obstack stmt_obstack;
61 /* Assume that case vectors are not pc-relative. */
62 #ifndef CASE_VECTOR_PC_RELATIVE
63 #define CASE_VECTOR_PC_RELATIVE 0
64 #endif
66 /* Functions and data structures for expanding case statements. */
68 /* Case label structure, used to hold info on labels within case
69 statements. We handle "range" labels; for a single-value label
70 as in C, the high and low limits are the same.
72 An AVL tree of case nodes is initially created, and later transformed
73 to a list linked via the RIGHT fields in the nodes. Nodes with
74 higher case values are later in the list.
76 Switch statements can be output in one of two forms. A branch table
77 is used if there are more than a few labels and the labels are dense
78 within the range between the smallest and largest case value. If a
79 branch table is used, no further manipulations are done with the case
80 node chain.
82 The alternative to the use of a branch table is to generate a series
83 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
84 and PARENT fields to hold a binary tree. Initially the tree is
85 totally unbalanced, with everything on the right. We balance the tree
86 with nodes on the left having lower case values than the parent
87 and nodes on the right having higher values. We then output the tree
88 in order. */
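/* Illustrative example (not from any source file): `switch (c)
   { case 1: ... case 2: ... case 3: ... case 4: ... }' has dense case
   values and would normally be emitted as a branch table, whereas
   `switch (c) { case 1: ... case 1000: ... case 50000: ... }' is
   sparse and would become a balanced tree of compare and jump insns.  */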
90 struct case_node
92 struct case_node *left; /* Left son in binary tree */
93 struct case_node *right; /* Right son in binary tree; also node chain */
94 struct case_node *parent; /* Parent of node in binary tree */
95 tree low; /* Lowest index value for this label */
96 tree high; /* Highest index value for this label */
97 tree code_label; /* Label to jump to when node matches */
98 int balance;
101 typedef struct case_node case_node;
102 typedef struct case_node *case_node_ptr;
104 /* These are used by estimate_case_costs and balance_case_nodes. */
106 /* This must be a signed type, and non-ANSI compilers lack signed char. */
107 static short cost_table_[129];
108 static int use_cost_table;
109 static int cost_table_initialized;
111 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
112 is unsigned. */
113 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT)((I) + 1)]
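/* Thus COST_TABLE (-1) is cost_table_[0] and COST_TABLE (127) is
   cost_table_[128], the last element; the 129 entries cover exactly
   the case values -1 through 127.  */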
115 /* Stack of control and binding constructs we are currently inside.
117 These constructs begin when you call `expand_start_WHATEVER'
118 and end when you call `expand_end_WHATEVER'. This stack records
119 info about how the construct began that tells the end-function
120 what to do. It also may provide information about the construct
121 to alter the behavior of other constructs within the body.
122 For example, they may affect the behavior of C `break' and `continue'.
124 Each construct gets one `struct nesting' object.
125 All of these objects are chained through the `all' field.
126 `nesting_stack' points to the first object (innermost construct).
127 The position of an entry on `nesting_stack' is in its `depth' field.
129 Each type of construct has its own individual stack.
130 For example, loops have `loop_stack'. Each object points to the
131 next object of the same type through the `next' field.
133 Some constructs are visible to `break' exit-statements and others
134 are not. Which constructs are visible depends on the language.
135 Therefore, the data structure allows each construct to be visible
136 or not, according to the args given when the construct is started.
137 The construct is visible if the `exit_label' field is non-null.
138 In that case, the value should be a CODE_LABEL rtx. */
140 struct nesting
142 struct nesting *all;
143 struct nesting *next;
144 int depth;
145 rtx exit_label;
146 union
148 /* For conds (if-then and if-then-else statements). */
149 struct
151 /* Label for the end of the if construct.
152 There is none if EXITFLAG was not set
153 and no `else' has been seen yet. */
154 rtx endif_label;
155 /* Label for the end of this alternative.
156 This may be the end of the if or the next else/elseif. */
157 rtx next_label;
158 } cond;
159 /* For loops. */
160 struct
162 /* Label at the top of the loop; place to loop back to. */
163 rtx start_label;
164 /* Label at the end of the whole construct. */
165 rtx end_label;
166 /* Label before a jump that branches to the end of the whole
167 construct. This is where destructors go if any. */
168 rtx alt_end_label;
169 /* Label for `continue' statement to jump to;
170 this is in front of the stepper of the loop. */
171 rtx continue_label;
172 } loop;
173 /* For variable binding contours. */
174 struct
176 /* Sequence number of this binding contour within the function,
177 in order of entry. */
178 int block_start_count;
179 /* Nonzero => value to restore stack to on exit. */
180 rtx stack_level;
181 /* The NOTE that starts this contour.
182 Used by expand_goto to check whether the destination
183 is within each contour or not. */
184 rtx first_insn;
185 /* Innermost containing binding contour that has a stack level. */
186 struct nesting *innermost_stack_block;
187 /* List of cleanups to be run on exit from this contour.
188 This is a list of expressions to be evaluated.
189 The TREE_PURPOSE of each link is the ..._DECL node
190 which the cleanup pertains to. */
191 tree cleanups;
192 /* List of cleanup-lists of blocks containing this block,
193 as they were at the locus where this block appears.
194 There is an element for each containing block,
195 ordered innermost containing block first.
196 The tail of this list can be 0,
197 if all remaining elements would be empty lists.
198 The element's TREE_VALUE is the cleanup-list of that block,
199 which may be null. */
200 tree outer_cleanups;
201 /* Chain of labels defined inside this binding contour.
202 For contours that have stack levels or cleanups. */
203 struct label_chain *label_chain;
204 /* Number of function calls seen, as of start of this block. */
205 int n_function_calls;
206 /* Nonzero if this is associated with an EH region. */
207 int exception_region;
208 /* The saved target_temp_slot_level from our outer block.
209 We may reset target_temp_slot_level to be the level of
210 this block, if that is done, target_temp_slot_level
211 reverts to the saved target_temp_slot_level at the very
212 end of the block. */
213 int block_target_temp_slot_level;
214 /* True if we are currently emitting insns in an area of
215 output code that is controlled by a conditional
216 expression. This is used by the cleanup handling code to
217 generate conditional cleanup actions. */
218 int conditional_code;
219 /* A place to move the start of the exception region for any
220 of the conditional cleanups, must be at the end or after
221 the start of the last unconditional cleanup, and before any
222 conditional branch points. */
223 rtx last_unconditional_cleanup;
224 /* When in a conditional context, this is the specific
225 cleanup list associated with last_unconditional_cleanup,
226 where we place the conditionalized cleanups. */
227 tree *cleanup_ptr;
228 } block;
229 /* For switch (C) or case (Pascal) statements,
230 and also for dummies (see `expand_start_case_dummy'). */
231 struct
233 /* The insn after which the case dispatch should finally
234 be emitted. Zero for a dummy. */
235 rtx start;
236 /* A list of case labels; it is first built as an AVL tree.
237 During expand_end_case, this is converted to a list, and may be
238 rearranged into a nearly balanced binary tree. */
239 struct case_node *case_list;
240 /* Label to jump to if no case matches. */
241 tree default_label;
242 /* The expression to be dispatched on. */
243 tree index_expr;
244 /* Type that INDEX_EXPR should be converted to. */
245 tree nominal_type;
246 /* Name of this kind of statement, for warnings. */
247 const char *printname;
248 /* Used to save no_line_numbers till we see the first case label.
249 We set this to -1 when we see the first case label in this
250 case statement. */
251 int line_number_status;
252 } case_stmt;
253 } data;
256 /* Allocate and return a new `struct nesting'. */
258 #define ALLOC_NESTING() \
259 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
261 /* Pop the nesting stack element by element until we pop off
262 the element which is at the top of STACK.
263 Update all the other stacks, popping off elements from them
264 as we pop them from nesting_stack. */
266 #define POPSTACK(STACK) \
267 do { struct nesting *target = STACK; \
268 struct nesting *this; \
269 do { this = nesting_stack; \
270 if (loop_stack == this) \
271 loop_stack = loop_stack->next; \
272 if (cond_stack == this) \
273 cond_stack = cond_stack->next; \
274 if (block_stack == this) \
275 block_stack = block_stack->next; \
276 if (stack_block_stack == this) \
277 stack_block_stack = stack_block_stack->next; \
278 if (case_stack == this) \
279 case_stack = case_stack->next; \
280 nesting_depth = nesting_stack->depth - 1; \
281 nesting_stack = this->all; \
282 obstack_free (&stmt_obstack, this); } \
283 while (this != target); } while (0)
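/* Usage sketch (hypothetical): at the end of a loop construct,
   `POPSTACK (loop_stack);' pops every nesting object up to and
   including the innermost loop, unthreading each one from the
   per-type stacks and freeing it from stmt_obstack.  */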
285 /* In some cases it is impossible to generate code for a forward goto
286 until the label definition is seen. This happens when it may be necessary
287 for the goto to reset the stack pointer: we don't yet know how to do that.
288 So expand_goto puts an entry on this fixup list.
289 Each time a binding contour that resets the stack is exited,
290 we check each fixup.
291 If the target label has now been defined, we can insert the proper code. */
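/* A hypothetical example of the problem:

	{
	  char buf[n];		-- contour with a stack level
	  ...
	  goto done;		-- `done' is not yet defined here
	  ...
	}
      done: ;

   The goto must restore the stack pointer, but by how much is unknown
   until the label's contour is seen, so expand_fixup queues it.  */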
293 struct goto_fixup
295 /* Points to following fixup. */
296 struct goto_fixup *next;
297 /* Points to the insn before the jump insn.
298 If more code must be inserted, it goes after this insn. */
299 rtx before_jump;
300 /* The LABEL_DECL that this jump is jumping to, or 0
301 for break, continue or return. */
302 tree target;
303 /* The BLOCK for the place where this goto was found. */
304 tree context;
305 /* The CODE_LABEL rtx that this is jumping to. */
306 rtx target_rtl;
307 /* Number of binding contours started in current function
308 before the label reference. */
309 int block_start_count;
310 /* The outermost stack level that should be restored for this jump.
311 Each time a binding contour that resets the stack is exited,
312 if the target label is *not* yet defined, this slot is updated. */
313 rtx stack_level;
314 /* List of lists of cleanup expressions to be run by this goto.
315 There is one element for each block that this goto is within.
316 The tail of this list can be 0,
317 if all remaining elements would be empty.
318 The TREE_VALUE contains the cleanup list of that block as of the
319 time this goto was seen.
320 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
321 tree cleanup_list_list;
324 /* Within any binding contour that must restore a stack level,
325 all labels are recorded with a chain of these structures. */
327 struct label_chain
329 /* Points to the following entry in the label chain. */
330 struct label_chain *next;
331 tree label;
334 struct stmt_status
336 /* Chain of all pending binding contours. */
337 struct nesting *x_block_stack;
339 /* If any new stacks are added here, add them to POPSTACKS too. */
341 /* Chain of all pending binding contours that restore stack levels
342 or have cleanups. */
343 struct nesting *x_stack_block_stack;
345 /* Chain of all pending conditional statements. */
346 struct nesting *x_cond_stack;
348 /* Chain of all pending loops. */
349 struct nesting *x_loop_stack;
351 /* Chain of all pending case or switch statements. */
352 struct nesting *x_case_stack;
354 /* Separate chain including all of the above,
355 chained through the `all' field. */
356 struct nesting *x_nesting_stack;
358 /* Number of entries on nesting_stack now. */
359 int x_nesting_depth;
361 /* Number of binding contours started so far in this function. */
362 int x_block_start_count;
364 /* Each time we expand an expression-statement,
365 record the expr's type and its RTL value here. */
366 tree x_last_expr_type;
367 rtx x_last_expr_value;
369 /* Nonzero if within a ({...}) grouping, in which case we must
370 always compute a value for each expr-stmt in case it is the last one. */
371 int x_expr_stmts_for_value;
373 /* Filename and line number of last line-number note,
374 whether we actually emitted it or not. */
375 const char *x_emit_filename;
376 int x_emit_lineno;
378 struct goto_fixup *x_goto_fixup_chain;
381 #define block_stack (cfun->stmt->x_block_stack)
382 #define stack_block_stack (cfun->stmt->x_stack_block_stack)
383 #define cond_stack (cfun->stmt->x_cond_stack)
384 #define loop_stack (cfun->stmt->x_loop_stack)
385 #define case_stack (cfun->stmt->x_case_stack)
386 #define nesting_stack (cfun->stmt->x_nesting_stack)
387 #define nesting_depth (cfun->stmt->x_nesting_depth)
388 #define current_block_start_count (cfun->stmt->x_block_start_count)
389 #define last_expr_type (cfun->stmt->x_last_expr_type)
390 #define last_expr_value (cfun->stmt->x_last_expr_value)
391 #define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
392 #define emit_filename (cfun->stmt->x_emit_filename)
393 #define emit_lineno (cfun->stmt->x_emit_lineno)
394 #define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
396 /* Non-zero if we are using EH to handle cleanups. */
397 static int using_eh_for_cleanups_p = 0;
399 static int n_occurrences PARAMS ((int, const char *));
400 static void expand_goto_internal PARAMS ((tree, rtx, rtx));
401 static int expand_fixup PARAMS ((tree, rtx, rtx));
402 static rtx expand_nl_handler_label PARAMS ((rtx, rtx));
403 static void expand_nl_goto_receiver PARAMS ((void));
404 static void expand_nl_goto_receivers PARAMS ((struct nesting *));
405 static void fixup_gotos PARAMS ((struct nesting *, rtx, tree,
406 rtx, int));
407 static bool check_operand_nalternatives PARAMS ((tree, tree));
408 static bool check_unique_operand_names PARAMS ((tree, tree));
409 static tree resolve_operand_names PARAMS ((tree, tree, tree,
410 const char **));
411 static char *resolve_operand_name_1 PARAMS ((char *, tree, tree));
412 static void expand_null_return_1 PARAMS ((rtx));
413 static void expand_value_return PARAMS ((rtx));
414 static int tail_recursion_args PARAMS ((tree, tree));
415 static void expand_cleanups PARAMS ((tree, tree, int, int));
416 static void check_seenlabel PARAMS ((void));
417 static void do_jump_if_equal PARAMS ((rtx, rtx, rtx, int));
418 static int estimate_case_costs PARAMS ((case_node_ptr));
419 static void group_case_nodes PARAMS ((case_node_ptr));
420 static void balance_case_nodes PARAMS ((case_node_ptr *,
421 case_node_ptr));
422 static int node_has_low_bound PARAMS ((case_node_ptr, tree));
423 static int node_has_high_bound PARAMS ((case_node_ptr, tree));
424 static int node_is_bounded PARAMS ((case_node_ptr, tree));
425 static void emit_jump_if_reachable PARAMS ((rtx));
426 static void emit_case_nodes PARAMS ((rtx, case_node_ptr, rtx, tree));
427 static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
428 static void mark_cond_nesting PARAMS ((struct nesting *));
429 static void mark_loop_nesting PARAMS ((struct nesting *));
430 static void mark_block_nesting PARAMS ((struct nesting *));
431 static void mark_case_nesting PARAMS ((struct nesting *));
432 static void mark_case_node PARAMS ((struct case_node *));
433 static void mark_goto_fixup PARAMS ((struct goto_fixup *));
434 static void free_case_nodes PARAMS ((case_node_ptr));
436 void
437 using_eh_for_cleanups ()
439 using_eh_for_cleanups_p = 1;
442 /* Mark N (known to be a cond-nesting) for GC. */
444 static void
445 mark_cond_nesting (n)
446 struct nesting *n;
448 while (n)
450 ggc_mark_rtx (n->exit_label);
451 ggc_mark_rtx (n->data.cond.endif_label);
452 ggc_mark_rtx (n->data.cond.next_label);
454 n = n->next;
458 /* Mark N (known to be a loop-nesting) for GC. */
460 static void
461 mark_loop_nesting (n)
462 struct nesting *n;
465 while (n)
467 ggc_mark_rtx (n->exit_label);
468 ggc_mark_rtx (n->data.loop.start_label);
469 ggc_mark_rtx (n->data.loop.end_label);
470 ggc_mark_rtx (n->data.loop.alt_end_label);
471 ggc_mark_rtx (n->data.loop.continue_label);
473 n = n->next;
477 /* Mark N (known to be a block-nesting) for GC. */
479 static void
480 mark_block_nesting (n)
481 struct nesting *n;
483 while (n)
485 struct label_chain *l;
487 ggc_mark_rtx (n->exit_label);
488 ggc_mark_rtx (n->data.block.stack_level);
489 ggc_mark_rtx (n->data.block.first_insn);
490 ggc_mark_tree (n->data.block.cleanups);
491 ggc_mark_tree (n->data.block.outer_cleanups);
493 for (l = n->data.block.label_chain; l != NULL; l = l->next)
495 ggc_mark (l);
496 ggc_mark_tree (l->label);
499 ggc_mark_rtx (n->data.block.last_unconditional_cleanup);
501 /* ??? cleanup_ptr never points outside the stack, does it? */
503 n = n->next;
507 /* Mark N (known to be a case-nesting) for GC. */
509 static void
510 mark_case_nesting (n)
511 struct nesting *n;
513 while (n)
515 ggc_mark_rtx (n->exit_label);
516 ggc_mark_rtx (n->data.case_stmt.start);
518 ggc_mark_tree (n->data.case_stmt.default_label);
519 ggc_mark_tree (n->data.case_stmt.index_expr);
520 ggc_mark_tree (n->data.case_stmt.nominal_type);
522 mark_case_node (n->data.case_stmt.case_list);
523 n = n->next;
527 /* Mark C for GC. */
529 static void
530 mark_case_node (c)
531 struct case_node *c;
533 if (c != 0)
535 ggc_mark_tree (c->low);
536 ggc_mark_tree (c->high);
537 ggc_mark_tree (c->code_label);
539 mark_case_node (c->right);
540 mark_case_node (c->left);
544 /* Mark G for GC. */
546 static void
547 mark_goto_fixup (g)
548 struct goto_fixup *g;
550 while (g)
552 ggc_mark (g);
553 ggc_mark_rtx (g->before_jump);
554 ggc_mark_tree (g->target);
555 ggc_mark_tree (g->context);
556 ggc_mark_rtx (g->target_rtl);
557 ggc_mark_rtx (g->stack_level);
558 ggc_mark_tree (g->cleanup_list_list);
560 g = g->next;
564 /* Clear out all parts of the state in F that can safely be discarded
565 after the function has been compiled, to let garbage collection
566 reclaim the memory. */
568 void
569 free_stmt_status (f)
570 struct function *f;
572 /* We're about to free the function obstack. If we hold pointers to
573 things allocated there, then we'll try to mark them when we do
574 GC. So, we clear them out here explicitly. */
575 if (f->stmt)
576 free (f->stmt);
577 f->stmt = NULL;
580 /* Mark P for GC. */
582 void
583 mark_stmt_status (p)
584 struct stmt_status *p;
586 if (p == 0)
587 return;
589 mark_block_nesting (p->x_block_stack);
590 mark_cond_nesting (p->x_cond_stack);
591 mark_loop_nesting (p->x_loop_stack);
592 mark_case_nesting (p->x_case_stack);
594 ggc_mark_tree (p->x_last_expr_type);
595 /* last_expr_value is only valid if last_expr_type is nonzero. */
596 if (p->x_last_expr_type)
597 ggc_mark_rtx (p->x_last_expr_value);
599 mark_goto_fixup (p->x_goto_fixup_chain);
602 void
603 init_stmt ()
605 gcc_obstack_init (&stmt_obstack);
608 void
609 init_stmt_for_function ()
611 cfun->stmt = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));
613 /* We are not currently within any block, conditional, loop or case. */
614 block_stack = 0;
615 stack_block_stack = 0;
616 loop_stack = 0;
617 case_stack = 0;
618 cond_stack = 0;
619 nesting_stack = 0;
620 nesting_depth = 0;
622 current_block_start_count = 0;
624 /* No gotos have been expanded yet. */
625 goto_fixup_chain = 0;
627 /* We are not processing a ({...}) grouping. */
628 expr_stmts_for_value = 0;
629 last_expr_type = 0;
630 last_expr_value = NULL_RTX;
633 /* Return nonzero if anything is pushed on the loop, condition, or case
634 stack. */
635 int
636 in_control_zone_p ()
638 return cond_stack || loop_stack || case_stack;
641 /* Record the current file and line. Called from emit_line_note. */
642 void
643 set_file_and_line_for_stmt (file, line)
644 const char *file;
645 int line;
647 /* If we're outputting an inline function, and we add a line note,
648 there may be no CFUN->STMT information. So, there's no need to
649 update it. */
650 if (cfun->stmt)
652 emit_filename = file;
653 emit_lineno = line;
657 /* Emit a no-op instruction. */
659 void
660 emit_nop ()
662 rtx last_insn;
664 last_insn = get_last_insn ();
665 if (!optimize
666 && (GET_CODE (last_insn) == CODE_LABEL
667 || (GET_CODE (last_insn) == NOTE
668 && prev_real_insn (last_insn) == 0)))
669 emit_insn (gen_nop ());
672 /* Return the rtx-label that corresponds to a LABEL_DECL,
673 creating it if necessary. */
675 rtx
676 label_rtx (label)
677 tree label;
679 if (TREE_CODE (label) != LABEL_DECL)
680 abort ();
682 if (!DECL_RTL_SET_P (label))
683 SET_DECL_RTL (label, gen_label_rtx ());
685 return DECL_RTL (label);
689 /* Add an unconditional jump to LABEL as the next sequential instruction. */
691 void
692 emit_jump (label)
693 rtx label;
695 do_pending_stack_adjust ();
696 emit_jump_insn (gen_jump (label));
697 emit_barrier ();
700 /* Emit code to jump to the address
701 specified by the pointer expression EXP. */
703 void
704 expand_computed_goto (exp)
705 tree exp;
707 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
709 #ifdef POINTERS_EXTEND_UNSIGNED
710 x = convert_memory_address (Pmode, x);
711 #endif
713 emit_queue ();
714 /* Be sure the function is executable. */
715 if (current_function_check_memory_usage)
716 emit_library_call (chkr_check_exec_libfunc, LCT_CONST_MAKE_BLOCK,
717 VOIDmode, 1, x, ptr_mode);
719 do_pending_stack_adjust ();
720 emit_indirect_jump (x);
722 current_function_has_computed_jump = 1;
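/* (In GNU C this is reached for the computed-goto extension,
   `goto *ptr;', where PTR holds a label address taken with the
   `&&label' operator.)  */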
725 /* Handle goto statements and the labels that they can go to. */
727 /* Specify the location in the RTL code of a label LABEL,
728 which is a LABEL_DECL tree node.
730 This is used for the kind of label that the user can jump to with a
731 goto statement, and for alternatives of a switch or case statement.
732 RTL labels generated for loops and conditionals don't go through here;
733 they are generated directly at the RTL level, by other functions below.
735 Note that this has nothing to do with defining label *names*.
736 Languages vary in how they do that and what that even means. */
738 void
739 expand_label (label)
740 tree label;
742 struct label_chain *p;
744 do_pending_stack_adjust ();
745 emit_label (label_rtx (label));
746 if (DECL_NAME (label))
747 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
749 if (stack_block_stack != 0)
751 p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
752 p->next = stack_block_stack->data.block.label_chain;
753 stack_block_stack->data.block.label_chain = p;
754 p->label = label;
758 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
759 from nested functions. */
761 void
762 declare_nonlocal_label (label)
763 tree label;
765 rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
767 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
768 LABEL_PRESERVE_P (label_rtx (label)) = 1;
769 if (nonlocal_goto_handler_slots == 0)
771 emit_stack_save (SAVE_NONLOCAL,
772 &nonlocal_goto_stack_level,
773 PREV_INSN (tail_recursion_reentry));
775 nonlocal_goto_handler_slots
776 = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
779 /* Generate RTL code for a `goto' statement with target label LABEL.
780 LABEL should be a LABEL_DECL tree node that was or will later be
781 defined with `expand_label'. */
783 void
784 expand_goto (label)
785 tree label;
787 tree context;
789 /* Check for a nonlocal goto to a containing function. */
790 context = decl_function_context (label);
791 if (context != 0 && context != current_function_decl)
793 struct function *p = find_function_data (context);
794 rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
795 rtx handler_slot, static_chain, save_area, insn;
796 tree link;
798 /* Find the corresponding handler slot for this label. */
799 handler_slot = p->x_nonlocal_goto_handler_slots;
800 for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
801 link = TREE_CHAIN (link))
802 handler_slot = XEXP (handler_slot, 1);
803 handler_slot = XEXP (handler_slot, 0);
805 p->has_nonlocal_label = 1;
806 current_function_has_nonlocal_goto = 1;
807 LABEL_REF_NONLOCAL_P (label_ref) = 1;
809 /* Copy the rtl for the slots so that they won't be shared in
810 case the virtual stack vars register gets instantiated differently
811 in the parent than in the child. */
813 static_chain = copy_to_reg (lookup_static_chain (label));
815 /* Get addr of containing function's current nonlocal goto handler,
816 which will do any cleanups and then jump to the label. */
817 handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
818 virtual_stack_vars_rtx,
819 static_chain));
821 /* Get addr of containing function's nonlocal save area. */
822 save_area = p->x_nonlocal_goto_stack_level;
823 if (save_area)
824 save_area = replace_rtx (copy_rtx (save_area),
825 virtual_stack_vars_rtx, static_chain);
827 #if HAVE_nonlocal_goto
828 if (HAVE_nonlocal_goto)
829 emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
830 save_area, label_ref));
831 else
832 #endif
834 /* Restore frame pointer for containing function.
835 This sets the actual hard register used for the frame pointer
836 to the location of the function's incoming static chain info.
837 The non-local goto handler will then adjust it to contain the
838 proper value and reload the argument pointer, if needed. */
839 emit_move_insn (hard_frame_pointer_rtx, static_chain);
840 emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);
842 /* USE of hard_frame_pointer_rtx added for consistency;
843 not clear if really needed. */
844 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
845 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
846 emit_indirect_jump (handler_slot);
849 /* Search backwards to the jump insn and mark it as a
850 non-local goto. */
851 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
853 if (GET_CODE (insn) == JUMP_INSN)
855 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
856 const0_rtx, REG_NOTES (insn));
857 break;
859 else if (GET_CODE (insn) == CALL_INSN)
860 break;
863 else
864 expand_goto_internal (label, label_rtx (label), NULL_RTX);
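/* (The nonlocal case above arises, for example, when a GNU C nested
   function jumps to a label defined in its containing function.)  */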
867 /* Generate RTL code for a `goto' statement with target label BODY.
868 LABEL should be a LABEL_REF.
869 LAST_INSN, if non-0, is the rtx we should consider as the last
870 insn emitted (for the purposes of cleaning up a return). */
872 static void
873 expand_goto_internal (body, label, last_insn)
874 tree body;
875 rtx label;
876 rtx last_insn;
878 struct nesting *block;
879 rtx stack_level = 0;
881 if (GET_CODE (label) != CODE_LABEL)
882 abort ();
884 /* If label has already been defined, we can tell now
885 whether and how we must alter the stack level. */
887 if (PREV_INSN (label) != 0)
889 /* Find the innermost pending block that contains the label.
890 (Check containment by comparing insn-uids.)
891 Then restore the outermost stack level within that block,
892 and do cleanups of all blocks contained in it. */
893 for (block = block_stack; block; block = block->next)
895 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
896 break;
897 if (block->data.block.stack_level != 0)
898 stack_level = block->data.block.stack_level;
899 /* Execute the cleanups for blocks we are exiting. */
900 if (block->data.block.cleanups != 0)
902 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
903 do_pending_stack_adjust ();
907 if (stack_level)
909 /* Ensure stack adjust isn't done by emit_jump, as this
910 would clobber the stack pointer. This one should be
911 deleted as dead by flow. */
912 clear_pending_stack_adjust ();
913 do_pending_stack_adjust ();
915 /* Don't do this adjust if it's to the end label and this function
916 is to return with a depressed stack pointer. */
917 if (label == return_label
918 && (((TREE_CODE (TREE_TYPE (current_function_decl))
919 == FUNCTION_TYPE)
920 && (TYPE_RETURNS_STACK_DEPRESSED
921 (TREE_TYPE (current_function_decl))))))
923 else
924 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
927 if (body != 0 && DECL_TOO_LATE (body))
928 error ("jump to `%s' invalidly jumps into binding contour",
929 IDENTIFIER_POINTER (DECL_NAME (body)));
931 /* Label not yet defined: may need to put this goto
932 on the fixup list. */
933 else if (! expand_fixup (body, label, last_insn))
935 /* No fixup needed. Record that the label is the target
936 of at least one goto that has no fixup. */
937 if (body != 0)
938 TREE_ADDRESSABLE (body) = 1;
941 emit_jump (label);
944 /* Generate if necessary a fixup for a goto
945 whose target label in tree structure (if any) is TREE_LABEL
946 and whose target in rtl is RTL_LABEL.
948 If LAST_INSN is nonzero, we pretend that the jump appears
949 after insn LAST_INSN instead of at the current point in the insn stream.
951 The fixup will be used later to insert insns just before the goto.
952 Those insns will restore the stack level as appropriate for the
953 target label, and will (in the case of C++) also invoke any object
954 destructors which have to be invoked when we exit the scopes which
955 are exited by the goto.
957 Value is nonzero if a fixup is made. */
959 static int
960 expand_fixup (tree_label, rtl_label, last_insn)
961 tree tree_label;
962 rtx rtl_label;
963 rtx last_insn;
965 struct nesting *block, *end_block;
967 /* See if we can recognize which block the label will be output in.
968 This is possible in some very common cases.
969 If we succeed, set END_BLOCK to that block.
970 Otherwise, set it to 0. */
972 if (cond_stack
973 && (rtl_label == cond_stack->data.cond.endif_label
974 || rtl_label == cond_stack->data.cond.next_label))
975 end_block = cond_stack;
976 /* If we are in a loop, recognize certain labels which
977 are likely targets. This reduces the number of fixups
978 we need to create. */
979 else if (loop_stack
980 && (rtl_label == loop_stack->data.loop.start_label
981 || rtl_label == loop_stack->data.loop.end_label
982 || rtl_label == loop_stack->data.loop.continue_label))
983 end_block = loop_stack;
984 else
985 end_block = 0;
987 /* Now set END_BLOCK to the binding level to which we will return. */
989 if (end_block)
991 struct nesting *next_block = end_block->all;
992 block = block_stack;
994 /* First see if the END_BLOCK is inside the innermost binding level.
995 If so, then no cleanups or stack levels are relevant. */
996 while (next_block && next_block != block)
997 next_block = next_block->all;
999 if (next_block)
1000 return 0;
1002 /* Otherwise, set END_BLOCK to the innermost binding level
1003 which is outside the relevant control-structure nesting. */
1004 next_block = block_stack->next;
1005 for (block = block_stack; block != end_block; block = block->all)
1006 if (block == next_block)
1007 next_block = next_block->next;
1008 end_block = next_block;
1011 /* Does any containing block have a stack level or cleanups?
1012 If not, no fixup is needed, and that is the normal case
1013 (the only case, for standard C). */
1014 for (block = block_stack; block != end_block; block = block->next)
1015 if (block->data.block.stack_level != 0
1016 || block->data.block.cleanups != 0)
1017 break;
1019 if (block != end_block)
1021 /* Ok, a fixup is needed. Add a fixup to the list of such. */
1022 struct goto_fixup *fixup
1023 = (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
1024 /* In case an old stack level is restored, make sure that comes
1025 after any pending stack adjust. */
1026 /* ?? If the fixup isn't to come at the present position,
1027 doing the stack adjust here isn't useful. Doing it with our
1028 settings at that location isn't useful either. Let's hope
1029 someone does it! */
1030 if (last_insn == 0)
1031 do_pending_stack_adjust ();
1032 fixup->target = tree_label;
1033 fixup->target_rtl = rtl_label;
1035 /* Create a BLOCK node and a corresponding matched set of
1036 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
1037 this point. The notes will encapsulate any and all fixup
1038 code which we might later insert at this point in the insn
1039 stream. Also, the BLOCK node will be the parent (i.e. the
1040 `SUPERBLOCK') of any other BLOCK nodes which we might create
1041 later on when we are expanding the fixup code.
1043 Note that optimization passes (including expand_end_loop)
1044 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
1045 as a placeholder. */
1048 rtx original_before_jump
1049 = last_insn ? last_insn : get_last_insn ();
1050 rtx start;
1051 rtx end;
1052 tree block;
1054 block = make_node (BLOCK);
1055 TREE_USED (block) = 1;
1057 if (!cfun->x_whole_function_mode_p)
1058 insert_block (block);
1059 else
1061 BLOCK_CHAIN (block)
1062 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
1063 BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
1064 = block;
1067 start_sequence ();
1068 start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
1069 if (cfun->x_whole_function_mode_p)
1070 NOTE_BLOCK (start) = block;
1071 fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
1072 end = emit_note (NULL, NOTE_INSN_BLOCK_END);
1073 if (cfun->x_whole_function_mode_p)
1074 NOTE_BLOCK (end) = block;
1075 fixup->context = block;
1076 end_sequence ();
1077 emit_insns_after (start, original_before_jump);
1080 fixup->block_start_count = current_block_start_count;
1081 fixup->stack_level = 0;
1082 fixup->cleanup_list_list
1083 = ((block->data.block.outer_cleanups
1084 || block->data.block.cleanups)
1085 ? tree_cons (NULL_TREE, block->data.block.cleanups,
1086 block->data.block.outer_cleanups)
1087 : 0);
1088 fixup->next = goto_fixup_chain;
1089 goto_fixup_chain = fixup;
1092 return block != 0;
1095 /* Expand any needed fixups in the outermost binding level of the
1096 function. FIRST_INSN is the first insn in the function. */
1098 void
1099 expand_fixups (first_insn)
1100 rtx first_insn;
1102 fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
1105 /* When exiting a binding contour, process all pending gotos requiring fixups.
1106 THISBLOCK is the structure that describes the block being exited.
1107 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1108 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1109 FIRST_INSN is the insn that began this contour.
1111 Gotos that jump out of this contour must restore the
1112 stack level and do the cleanups before actually jumping.
1114 DONT_JUMP_IN nonzero means report an error if there is a jump into this
1115 contour from before the beginning of the contour.
1116 This is also done if STACK_LEVEL is nonzero. */
1118 static void
1119 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1120 struct nesting *thisblock;
1121 rtx stack_level;
1122 tree cleanup_list;
1123 rtx first_insn;
1124 int dont_jump_in;
1126 struct goto_fixup *f, *prev;
1128 /* F is the fixup we are considering; PREV is the previous one. */
1129 /* We run this loop in two passes so that cleanups of exited blocks
1130 are run first, and blocks that are exited are marked so
1131 afterwards. */
1133 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1135 /* Test for a fixup that is inactive because it is already handled. */
1136 if (f->before_jump == 0)
1138 /* Delete inactive fixup from the chain, if that is easy to do. */
1139 if (prev != 0)
1140 prev->next = f->next;
1142 /* Has this fixup's target label been defined?
1143 If so, we can finalize it. */
1144 else if (PREV_INSN (f->target_rtl) != 0)
1146 rtx cleanup_insns;
1148 /* If this fixup jumped into this contour from before the beginning
1149 of this contour, report an error. This code used to use
1150 the first non-label insn after f->target_rtl, but that's
1151 wrong since such insns can be added by things like put_var_into_stack
1152 and have INSN_UIDs that are out of the range of the block. */
1153 /* ??? Bug: this does not detect jumping in through intermediate
1154 blocks that have stack levels or cleanups.
1155 It detects only a problem with the innermost block
1156 around the label. */
1157 if (f->target != 0
1158 && (dont_jump_in || stack_level || cleanup_list)
1159 && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
1160 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
1161 && ! DECL_ERROR_ISSUED (f->target))
1163 error_with_decl (f->target,
1164 "label `%s' used before containing binding contour");
1165 /* Prevent multiple errors for one label. */
1166 DECL_ERROR_ISSUED (f->target) = 1;
1169 /* We will expand the cleanups into a sequence of their own and
1170 then later on we will attach this new sequence to the insn
1171 stream just ahead of the actual jump insn. */
1173 start_sequence ();
1175 /* Temporarily restore the lexical context where we will
1176 logically be inserting the fixup code. We do this for the
1177 sake of getting the debugging information right. */
1179 pushlevel (0);
1180 set_block (f->context);
1182 /* Expand the cleanups for blocks this jump exits. */
1183 if (f->cleanup_list_list)
1185 tree lists;
1186 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1187 /* Marked elements correspond to blocks that have been closed.
1188 Do their cleanups. */
1189 if (TREE_ADDRESSABLE (lists)
1190 && TREE_VALUE (lists) != 0)
1192 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1193 /* Pop any pushes done in the cleanups,
1194 in case function is about to return. */
1195 do_pending_stack_adjust ();
1199 /* Restore stack level for the biggest contour that this
1200 jump jumps out of. */
1201 if (f->stack_level
1202 && ! (f->target_rtl == return_label
1203 && ((TREE_CODE (TREE_TYPE (current_function_decl))
1204 == FUNCTION_TYPE)
1205 && (TYPE_RETURNS_STACK_DEPRESSED
1206 (TREE_TYPE (current_function_decl))))))
1207 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1209 /* Finish up the sequence containing the insns which implement the
1210 necessary cleanups, and then attach that whole sequence to the
1211 insn stream just ahead of the actual jump insn. Attaching it
1212 at that point ensures that any cleanups which are in fact
1213 implicit C++ object destructions (which must be executed upon
1214 leaving the block) appear (to the debugger) to be taking place
1215 in an area of the generated code where the object(s) being
1216 destructed are still "in scope". */
1218 cleanup_insns = get_insns ();
1219 poplevel (1, 0, 0);
1221 end_sequence ();
1222 emit_insns_after (cleanup_insns, f->before_jump);
1224 f->before_jump = 0;
1228 /* For any still-undefined labels, do the cleanups for this block now.
1229 We must do this now since items in the cleanup list may go out
1230 of scope when the block ends. */
1231 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1232 if (f->before_jump != 0
1233 && PREV_INSN (f->target_rtl) == 0
1234 /* Label has still not appeared. If we are exiting a block with
1235 a stack level to restore, that started before the fixup,
1236 mark this stack level as needing restoration
1237 when the fixup is later finalized. */
1238 && thisblock != 0
1239 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
1240 means the label is undefined. That's erroneous, but possible. */
1241 && (thisblock->data.block.block_start_count
1242 <= f->block_start_count))
1244 tree lists = f->cleanup_list_list;
1245 rtx cleanup_insns;
1247 for (; lists; lists = TREE_CHAIN (lists))
1248 /* If the following elt. corresponds to our containing block
1249 then the elt. must be for this block. */
1250 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1252 start_sequence ();
1253 pushlevel (0);
1254 set_block (f->context);
1255 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1256 do_pending_stack_adjust ();
1257 cleanup_insns = get_insns ();
1258 poplevel (1, 0, 0);
1259 end_sequence ();
1260 if (cleanup_insns != 0)
1261 f->before_jump
1262 = emit_insns_after (cleanup_insns, f->before_jump);
1264 f->cleanup_list_list = TREE_CHAIN (lists);
1267 if (stack_level)
1268 f->stack_level = stack_level;
1272 /* Return the number of times character C occurs in string S. */
1273 static int
1274 n_occurrences (c, s)
1275 int c;
1276 const char *s;
1278 int n = 0;
1279 while (*s)
1280 n += (*s++ == c);
1281 return n;
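/* For example, n_occurrences ('%', "%%eax") returns 2.  */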
1284 /* Generate RTL for an asm statement (explicit assembler code).
1285 BODY is a STRING_CST node containing the assembler code text,
1286 or an ADDR_EXPR containing a STRING_CST. */
1288 void
1289 expand_asm (body)
1290 tree body;
1292 if (current_function_check_memory_usage)
1294 error ("`asm' cannot be used in function where memory usage is checked");
1295 return;
1298 if (TREE_CODE (body) == ADDR_EXPR)
1299 body = TREE_OPERAND (body, 0);
1301 emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
1302 TREE_STRING_POINTER (body)));
1303 last_expr_type = 0;
1306 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
1307 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
1308 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
1309 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
1310 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
1311 constraint allows the use of a register operand. And, *IS_INOUT
1312 will be true if the operand is read-write, i.e., if it is used as
1313 an input as well as an output. If *CONSTRAINT_P is not in
1314 canonical form, it will be made canonical. (Note that `+' will be
1315 replaced with `=' as part of this process.)
1317 Returns TRUE if all went well; FALSE if an error occurred. */
1319 bool
1320 parse_output_constraint (constraint_p,
1321 operand_num,
1322 ninputs,
1323 noutputs,
1324 allows_mem,
1325 allows_reg,
1326 is_inout)
1327 const char **constraint_p;
1328 int operand_num;
1329 int ninputs;
1330 int noutputs;
1331 bool *allows_mem;
1332 bool *allows_reg;
1333 bool *is_inout;
1335 const char *constraint = *constraint_p;
1336 const char *p;
1338 /* Assume the constraint doesn't allow the use of either a register
1339 or memory. */
1340 *allows_mem = false;
1341 *allows_reg = false;
1343 /* Allow the `=' or `+' to not be at the beginning of the string,
1344 since it wasn't explicitly documented that way, and there is a
1345 large body of code that puts it last. Swap the character to
1346 the front, so as not to uglify any place else. */
1347 p = strchr (constraint, '=');
1348 if (!p)
1349 p = strchr (constraint, '+');
1351 /* If the string doesn't contain an `=', issue an error
1352 message. */
1353 if (!p)
1355 error ("output operand constraint lacks `='");
1356 return false;
1359 /* If the constraint begins with `+', then the operand is both read
1360 from and written to. */
1361 *is_inout = (*p == '+');
1363 /* Canonicalize the output constraint so that it begins with `='. */
1364 if (p != constraint || is_inout)
1366 char *buf;
1367 size_t c_len = strlen (constraint);
1369 if (p != constraint)
1370 warning ("output constraint `%c' for operand %d is not at the beginning",
1371 *p, operand_num);
1373 /* Make a copy of the constraint. */
1374 buf = alloca (c_len + 1);
1375 strcpy (buf, constraint);
1376 /* Swap the first character and the `=' or `+'. */
1377 buf[p - constraint] = buf[0];
1378 /* Make sure the first character is an `='. (Until we do this,
1379 it might be a `+'.) */
1380 buf[0] = '=';
1381 /* Replace the constraint with the canonicalized string. */
1382 *constraint_p = ggc_alloc_string (buf, c_len);
1383 constraint = *constraint_p;
1386 /* Loop through the constraint string. */
1387 for (p = constraint + 1; *p; ++p)
1388 switch (*p)
1390 case '+':
1391 case '=':
1392 error ("operand constraint contains '+' or '=' at illegal position.");
1393 return false;
1395 case '%':
1396 if (operand_num + 1 == ninputs + noutputs)
1398 error ("`%%' constraint used with last operand");
1399 return false;
1401 break;
1403 case 'V': case 'm': case 'o':
1404 *allows_mem = true;
1405 break;
1407 case '?': case '!': case '*': case '&': case '#':
1408 case 'E': case 'F': case 'G': case 'H':
1409 case 's': case 'i': case 'n':
1410 case 'I': case 'J': case 'K': case 'L': case 'M':
1411 case 'N': case 'O': case 'P': case ',':
1412 break;
1414 case '0': case '1': case '2': case '3': case '4':
1415 case '5': case '6': case '7': case '8': case '9':
1416 case '[':
1417 error ("matching constraint not valid in output operand");
1418 return false;
1420 case '<': case '>':
1421 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1422 excepting those that expand_call created. So match memory
1423 and hope. */
1424 *allows_mem = true;
1425 break;
1427 case 'g': case 'X':
1428 *allows_reg = true;
1429 *allows_mem = true;
1430 break;
1432 case 'p': case 'r':
1433 *allows_reg = true;
1434 break;
1436 default:
1437 if (!ISALPHA (*p))
1438 break;
1439 if (REG_CLASS_FROM_LETTER (*p) != NO_REGS)
1440 *allows_reg = true;
1441 #ifdef EXTRA_CONSTRAINT
1442 else
1444 /* Otherwise we can't assume anything about the nature of
1445 the constraint except that it isn't purely registers.
1446 Treat it like "g" and hope for the best. */
1447 *allows_reg = true;
1448 *allows_mem = true;
1450 #endif
1451 break;
1454 return true;
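/* Usage sketch with hypothetical values: for an output constraint
   "+r" this canonicalizes *CONSTRAINT_P to "=r" and returns true with
   *ALLOWS_REG and *IS_INOUT set; for "=m" it sets *ALLOWS_MEM
   instead; a constraint containing neither `=' nor `+' draws an
   error and a false return.  */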
1457 /* Generate RTL for an asm statement with arguments.
1458 STRING is the instruction template.
1459 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1460 Each output or input has an expression in the TREE_VALUE and
1461 a tree list in TREE_PURPOSE which in turn contains a constraint
1462 name in TREE_VALUE (or NULL_TREE) and a constraint string
1463 in TREE_PURPOSE.
1464 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1465 that is clobbered by this insn.
1467 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1468 Some elements of OUTPUTS may be replaced with trees representing temporary
1469 values. The caller should copy those temporary values to the originally
1470 specified lvalues.
1472 VOL nonzero means the insn is volatile; don't optimize it. */
1474 void
1475 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1476 tree string, outputs, inputs, clobbers;
1477 int vol;
1478 const char *filename;
1479 int line;
1481 rtvec argvec, constraintvec;
1482 rtx body;
1483 int ninputs = list_length (inputs);
1484 int noutputs = list_length (outputs);
1485 int ninout = 0;
1486 int nclobbers;
1487 tree tail;
1488 int i;
1489 /* Vector of RTX's of evaluated output operands. */
1490 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1491 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1492 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1493 enum machine_mode *inout_mode
1494 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1495 const char **constraints
1496 = (const char **) alloca ((noutputs + ninputs) * sizeof (const char *));
1497 /* The insn we have emitted. */
1498 rtx insn;
1499 int old_generating_concat_p = generating_concat_p;
1501 /* An ASM with no outputs needs to be treated as volatile, for now. */
1502 if (noutputs == 0)
1503 vol = 1;
1505 if (current_function_check_memory_usage)
1507 error ("`asm' cannot be used in function where memory usage is checked");
1508 return;
1511 if (! check_operand_nalternatives (outputs, inputs))
1512 return;
1514 if (! check_unique_operand_names (outputs, inputs))
1515 return;
1517 string = resolve_operand_names (string, outputs, inputs, constraints);
1519 #ifdef MD_ASM_CLOBBERS
1520 /* Sometimes we wish to automatically clobber registers across an asm.
1521 Case in point is when the i386 backend moved from cc0 to a hard reg --
1522 maintaining source-level compatibility means automatically clobbering
1523 the flags register. */
1524 MD_ASM_CLOBBERS (clobbers);
1525 #endif
1527 /* Count the number of meaningful clobbered registers, ignoring what
1528 we would ignore later. */
1529 nclobbers = 0;
1530 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1532 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1534 i = decode_reg_name (regname);
1535 if (i >= 0 || i == -4)
1536 ++nclobbers;
1537 else if (i == -2)
1538 error ("unknown register name `%s' in `asm'", regname);
1541 last_expr_type = 0;
1543 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1545 tree val = TREE_VALUE (tail);
1546 tree type = TREE_TYPE (val);
1547 bool is_inout;
1548 bool allows_reg;
1549 bool allows_mem;
1551 /* If there's an erroneous arg, emit no insn. */
1552 if (type == error_mark_node)
1553 return;
1555 /* Make sure constraint has `=' and does not have `+'. Also, see
1556 if it allows any register. Be liberal on the latter test, since
1557 the worst that happens if we get it wrong is we issue an error
1558 message. */
1560 /* Try to parse the output constraint. If that fails, there's
1561 no point in going further. */
1562 if (!parse_output_constraint (&constraints[i],
1564 ninputs,
1565 noutputs,
1566 &allows_mem,
1567 &allows_reg,
1568 &is_inout))
1569 return;
1571 /* If an output operand is not a decl or indirect ref and our constraint
1572 allows a register, make a temporary to act as an intermediate.
1573 Make the asm insn write into that, then our caller will copy it to
1574 the real output operand. Likewise for promoted variables. */
1576 generating_concat_p = 0;
1578 real_output_rtx[i] = NULL_RTX;
1579 if ((TREE_CODE (val) == INDIRECT_REF
1580 && allows_mem)
1581 || (DECL_P (val)
1582 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1583 && ! (GET_CODE (DECL_RTL (val)) == REG
1584 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1585 || ! allows_reg
1586 || is_inout)
1588 if (! allows_reg)
1589 mark_addressable (TREE_VALUE (tail));
1591 output_rtx[i]
1592 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
1593 EXPAND_MEMORY_USE_WO);
1595 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1596 error ("output number %d not directly addressable", i);
1597 if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
1598 || GET_CODE (output_rtx[i]) == CONCAT)
1600 real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1601 output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
1602 if (is_inout)
1603 emit_move_insn (output_rtx[i], real_output_rtx[i]);
1606 else
1608 output_rtx[i] = assign_temp (type, 0, 0, 1);
1609 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1612 generating_concat_p = old_generating_concat_p;
1614 if (is_inout)
1616 inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
1617 inout_opnum[ninout++] = i;
1621 ninputs += ninout;
1622 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1624 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1625 return;
1628 /* Make vectors for the expression-rtx, constraint strings,
1629 and named operands. */
1631 argvec = rtvec_alloc (ninputs);
1632 constraintvec = rtvec_alloc (ninputs);
1634 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1635 : GET_MODE (output_rtx[0])),
1636 TREE_STRING_POINTER (string),
1637 empty_string, 0, argvec, constraintvec,
1638 filename, line);
1640 MEM_VOLATILE_P (body) = vol;
1642 /* Eval the inputs and put them into ARGVEC.
1643 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1645 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1647 int j;
1648 int allows_reg = 0, allows_mem = 0;
1649 const char *constraint, *orig_constraint;
1650 int c_len;
1651 rtx op;
1653 /* If there's an erroneous arg, emit no insn,
1654 because the ASM_INPUT would get VOIDmode
1655 and that could cause a crash in reload. */
1656 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1657 return;
1659 /* ??? Can this happen, and does the error message make any sense? */
1660 if (TREE_PURPOSE (tail) == NULL_TREE)
1662 error ("hard register `%s' listed as input operand to `asm'",
1663 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1664 return;
1667 orig_constraint = constraint = constraints[i + noutputs];
1668 c_len = strlen (constraint);
1670 /* Make sure constraint has neither `=', `+', nor '&'. */
1672 for (j = 0; j < c_len; j++)
1673 switch (constraint[j])
1675 case '+': case '=': case '&':
1676 if (constraint == orig_constraint)
1678 error ("input operand constraint contains `%c'",
1679 constraint[j]);
1680 return;
1682 break;
1684 case '%':
1685 if (constraint == orig_constraint
1686 && i + 1 == ninputs - ninout)
1688 error ("`%%' constraint used with last operand");
1689 return;
1691 break;
1693 case 'V': case 'm': case 'o':
1694 allows_mem = 1;
1695 break;
1697 case '<': case '>':
1698 case '?': case '!': case '*': case '#':
1699 case 'E': case 'F': case 'G': case 'H':
1700 case 's': case 'i': case 'n':
1701 case 'I': case 'J': case 'K': case 'L': case 'M':
1702 case 'N': case 'O': case 'P': case ',':
1703 break;
1705 /* Whether or not a numeric constraint allows a register is
1706 decided by the matching constraint, and so there is no need
1707 to do anything special with them. We must handle them in
1708 the default case, so that we don't unnecessarily force
1709 operands to memory. */
1710 case '0': case '1': case '2': case '3': case '4':
1711 case '5': case '6': case '7': case '8': case '9':
1713 char *end;
1714 unsigned long match;
1716 match = strtoul (constraint + j, &end, 10);
1717 if (match >= (unsigned long) noutputs)
1719 error ("matching constraint references invalid operand number");
1720 return;
1723 /* Try and find the real constraint for this dup. Only do
1724 this if the matching constraint is the only alternative. */
1725 if (*end == '\0'
1726 && (j == 0 || (j == 1 && constraint[0] == '%')))
1728 constraint = constraints[match];
1729 c_len = strlen (constraint);
1730 j = 0;
1731 break;
1733 else
1734 j = end - constraint;
1736 /* Fall through. */
1738 case 'p': case 'r':
1739 allows_reg = 1;
1740 break;
1742 case 'g': case 'X':
1743 allows_reg = 1;
1744 allows_mem = 1;
1745 break;
1747 default:
1748 if (! ISALPHA (constraint[j]))
1750 error ("invalid punctuation `%c' in constraint",
1751 constraint[j]);
1752 return;
1754 if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
1755 allows_reg = 1;
1756 #ifdef EXTRA_CONSTRAINT
1757 else
1759 /* Otherwise we can't assume anything about the nature of
1760 the constraint except that it isn't purely registers.
1761 Treat it like "g" and hope for the best. */
1762 allows_reg = 1;
1763 allows_mem = 1;
1765 #endif
1766 break;
1769 if (! allows_reg && allows_mem)
1770 mark_addressable (TREE_VALUE (tail));
1772 op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
1774 /* Never pass a CONCAT to an ASM. */
1775 generating_concat_p = 0;
1776 if (GET_CODE (op) == CONCAT)
1777 op = force_reg (GET_MODE (op), op);
1779 if (asm_operand_ok (op, constraint) <= 0)
1781 if (allows_reg)
1782 op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
1783 else if (!allows_mem)
1784 warning ("asm operand %d probably doesn't match constraints",
1785 i + noutputs);
1786 else if (CONSTANT_P (op))
1787 op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1788 op);
1789 else if (GET_CODE (op) == REG
1790 || GET_CODE (op) == SUBREG
1791 || GET_CODE (op) == ADDRESSOF
1792 || GET_CODE (op) == CONCAT)
1794 tree type = TREE_TYPE (TREE_VALUE (tail));
1795 tree qual_type = build_qualified_type (type,
1796 (TYPE_QUALS (type)
1797 | TYPE_QUAL_CONST));
1798 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1800 emit_move_insn (memloc, op);
1801 op = memloc;
1804 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1805 /* We won't recognize volatile memory as a
1806 memory_operand at this point. Ignore it. */
1807 ;
1808 else if (queued_subexp_p (op))
1809 ;
1810 else
1811 /* ??? Leave this only until we have experience with what
1812 happens in combine and elsewhere when constraints are
1813 not satisfied. */
1814 warning ("asm operand %d probably doesn't match constraints",
1815 i + noutputs);
1817 generating_concat_p = old_generating_concat_p;
1818 ASM_OPERANDS_INPUT (body, i) = op;
1820 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1821 = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1822 orig_constraint);
1825 /* Protect all the operands from the queue now that they have all been
1826 evaluated. */
1828 generating_concat_p = 0;
1830 for (i = 0; i < ninputs - ninout; i++)
1831 ASM_OPERANDS_INPUT (body, i)
1832 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1834 for (i = 0; i < noutputs; i++)
1835 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1837 /* For in-out operands, copy output rtx to input rtx. */
1838 for (i = 0; i < ninout; i++)
1840 int j = inout_opnum[i];
1841 char buffer[16];
1843 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1844 = output_rtx[j];
1846 sprintf (buffer, "%d", j);
1847 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1848 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_alloc_string (buffer, -1));
1851 generating_concat_p = old_generating_concat_p;
1853 /* Now, for each output, construct an rtx
1854 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1855 ARGVEC CONSTRAINTS OPNAMES))
1856 If there is more than one, put them inside a PARALLEL. */
1858 if (noutputs == 1 && nclobbers == 0)
1860 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1861 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1864 else if (noutputs == 0 && nclobbers == 0)
1866 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1867 insn = emit_insn (body);
1870 else
1872 rtx obody = body;
1873 int num = noutputs;
1875 if (num == 0)
1876 num = 1;
1878 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1880 /* For each output operand, store a SET. */
1881 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1883 XVECEXP (body, 0, i)
1884 = gen_rtx_SET (VOIDmode,
1885 output_rtx[i],
1886 gen_rtx_ASM_OPERANDS
1887 (GET_MODE (output_rtx[i]),
1888 TREE_STRING_POINTER (string),
1889 constraints[i], i, argvec, constraintvec,
1890 filename, line));
1892 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1895 /* If there are no outputs (but there are some clobbers)
1896 store the bare ASM_OPERANDS into the PARALLEL. */
1898 if (i == 0)
1899 XVECEXP (body, 0, i++) = obody;
1901 /* Store (clobber REG) for each clobbered register specified. */
1903 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1905 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1906 int j = decode_reg_name (regname);
1908 if (j < 0)
1910 if (j == -3) /* `cc', which is not a register */
1911 continue;
1913 if (j == -4) /* `memory', don't cache memory across asm */
1915 XVECEXP (body, 0, i++)
1916 = gen_rtx_CLOBBER (VOIDmode,
1917 gen_rtx_MEM
1918 (BLKmode,
1919 gen_rtx_SCRATCH (VOIDmode)));
1920 continue;
1923 /* Ignore unknown register, error already signaled. */
1924 continue;
1927 /* Use QImode since that's guaranteed to clobber just one reg. */
1928 XVECEXP (body, 0, i++)
1929 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1932 insn = emit_insn (body);
1935 /* For any outputs that needed reloading into registers, spill them
1936 back to where they belong. */
1937 for (i = 0; i < noutputs; ++i)
1938 if (real_output_rtx[i])
1939 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1941 free_temp_slots ();
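/* For illustration (a sketch, not generated output): an in-out operand
   written `asm ("inc %0" : "+r" (x))' appears above as output 0 plus a
   hidden input that reuses output_rtx[0] and whose constraint is the
   string "0", built by the sprintf loop over inout_opnum, so later
   passes see an ordinary matching constraint.  */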
1944 /* A subroutine of expand_asm_operands. Check that all operands have
1945 the same number of alternatives. Return true if so. */
1947 static bool
1948 check_operand_nalternatives (outputs, inputs)
1949 tree outputs, inputs;
1951 if (outputs || inputs)
1953 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1954 int nalternatives
1955 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1956 tree next = inputs;
1958 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1960 error ("too many alternatives in `asm'");
1961 return false;
1964 tmp = outputs;
1965 while (tmp)
1967 const char *constraint
1968 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1970 if (n_occurrences (',', constraint) != nalternatives)
1972 error ("operand constraints for `asm' differ in number of alternatives");
1973 return false;
1976 if (TREE_CHAIN (tmp))
1977 tmp = TREE_CHAIN (tmp);
1978 else
1979 tmp = next, next = 0;
1983 return true;
1986 /* A subroutine of expand_asm_operands. Check that all operand names
1987 are unique. Return true if so. We rely on the fact that these names
1988 are identifiers, and so have been canonicalized by get_identifier,
1989 so all we need are pointer comparisons. */
1991 static bool
1992 check_unique_operand_names (outputs, inputs)
1993 tree outputs, inputs;
1995 tree i, j;
1997 for (i = outputs; i ; i = TREE_CHAIN (i))
1999 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
2000 if (! i_name)
2001 continue;
2003 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
2004 if (i_name == TREE_PURPOSE (TREE_PURPOSE (j)))
2005 goto failure;
2008 for (i = inputs; i ; i = TREE_CHAIN (i))
2010 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
2011 if (! i_name)
2012 continue;
2014 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
2015 if (i_name == TREE_PURPOSE (TREE_PURPOSE (j)))
2016 goto failure;
2017 for (j = outputs; j ; j = TREE_CHAIN (j))
2018 if (i_name == TREE_PURPOSE (TREE_PURPOSE (j)))
2019 goto failure;
2022 return true;
2024 failure:
2025 error ("duplicate asm operand name '%s'",
2026 IDENTIFIER_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
2027 return false;
2030 /* A subroutine of expand_asm_operands. Resolve the names of the operands
2031 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
2032 STRING and in the constraints to those numbers. */
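/* For example (an illustrative sketch): given

   asm ("add %[dst],%[src]" : [dst] "=r" (x) : [src] "r" (y));

   the template is rewritten as if it read "add %0,%1", and a `[name]'
   appearing in an input constraint string (for a matching constraint)
   is replaced by the operand number in the same way.  */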
2034 static tree
2035 resolve_operand_names (string, outputs, inputs, pconstraints)
2036 tree string;
2037 tree outputs, inputs;
2038 const char **pconstraints;
2040 char *buffer = xstrdup (TREE_STRING_POINTER (string));
2041 char *p;
2042 tree t;
2044 /* Assume that we will not need extra space to perform the substitution.
2045 This is because we get to remove '[' and ']', which means we cannot have
2046 a problem until we have more than 999 operands. */
2048 p = buffer;
2049 while ((p = strchr (p, '%')) != NULL)
2051 if (*++p != '[')
2052 continue;
2053 p = resolve_operand_name_1 (p, outputs, inputs);
2056 string = build_string (strlen (buffer), buffer);
2057 free (buffer);
2059 /* Collect output constraints here because it's convenient.
2060 There should be no named operands here; this is verified
2061 in expand_asm_operands. */
2062 for (t = outputs; t ; t = TREE_CHAIN (t), pconstraints++)
2063 *pconstraints = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2065 /* Substitute [<name>] in input constraint strings. */
2066 for (t = inputs; t ; t = TREE_CHAIN (t), pconstraints++)
2068 const char *c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2069 if (strchr (c, '[') == NULL)
2070 *pconstraints = c;
2071 else
2073 p = buffer = xstrdup (c);
2074 while ((p = strchr (p, '[')) != NULL)
2075 p = resolve_operand_name_1 (p, outputs, inputs);
2077 *pconstraints = ggc_alloc_string (buffer, -1);
2078 free (buffer);
2082 return string;
2085 /* A subroutine of resolve_operand_names. P points to the '[' for a
2086 potential named operand of the form [<name>]. In place, replace
2087 the name and brackets with a number. Return a pointer to the
2088 balance of the string after substitution. */
2090 static char *
2091 resolve_operand_name_1 (p, outputs, inputs)
2092 char *p;
2093 tree outputs, inputs;
2095 char *q;
2096 int op;
2097 tree t;
2098 size_t len;
2100 /* Collect the operand name. */
2101 q = strchr (p, ']');
2102 if (!q)
2104 error ("missing close brace for named operand");
2105 return strchr (p, '\0');
2107 len = q - p - 1;
2109 /* Resolve the name to a number. */
2110 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2112 const char *c = IDENTIFIER_POINTER (TREE_PURPOSE (TREE_PURPOSE (t)));
2113 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2114 goto found;
2116 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2118 const char *c = IDENTIFIER_POINTER (TREE_PURPOSE (TREE_PURPOSE (t)));
2119 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2120 goto found;
2123 *q = '\0';
2124 error ("undefined named operand '%s'", p + 1);
2125 op = 0;
2126 found:
2128 /* Replace the name with the number. Unfortunately, not all libraries
2129 get the return value of sprintf correct, so search for the end of the
2130 generated string by hand. */
2131 sprintf (p, "%d", op);
2132 p = strchr (p, '\0');
2134 /* Verify the no extra buffer space assumption. */
2135 if (p > q)
2136 abort ();
2138 /* Shift the rest of the buffer down to fill the gap. */
2139 memmove (p, q + 1, strlen (q + 1) + 1);
2141 return p;
2144 /* Generate RTL to evaluate the expression EXP
2145 and remember it in case this is the VALUE in a ({... VALUE; }) constr. */
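/* E.g. (illustrative, assuming the GNU statement-expression extension):
   in `y = ({ int t = f (); t + 1; });' the value of the final
   expression `t + 1' is what gets remembered, so that
   expand_end_stmt_expr can return it as the value of the whole
   construct.  */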
2147 void
2148 expand_expr_stmt (exp)
2149 tree exp;
2151 /* If -W, warn about statements with no side effects,
2152 except for an explicit cast to void (e.g. for assert()), and
2153 except inside a ({...}) where they may be useful. */
2154 if (expr_stmts_for_value == 0 && exp != error_mark_node)
2156 if (! TREE_SIDE_EFFECTS (exp))
2158 if ((extra_warnings || warn_unused_value)
2159 && !(TREE_CODE (exp) == CONVERT_EXPR
2160 && VOID_TYPE_P (TREE_TYPE (exp))))
2161 warning_with_file_and_line (emit_filename, emit_lineno,
2162 "statement with no effect");
2164 else if (warn_unused_value)
2165 warn_if_unused_value (exp);
2168 /* If EXP is of function type and we are expanding statements for
2169 value, convert it to pointer-to-function. */
2170 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2171 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2173 /* The call to `expand_expr' could cause last_expr_type and
2174 last_expr_value to get reset. Therefore, we set last_expr_value
2175 and last_expr_type *after* calling expand_expr. */
2176 last_expr_value = expand_expr (exp,
2177 (expr_stmts_for_value
2178 ? NULL_RTX : const0_rtx),
2179 VOIDmode, 0);
2180 last_expr_type = TREE_TYPE (exp);
2182 /* If all we do is reference a volatile value in memory,
2183 copy it to a register to be sure it is actually touched. */
2184 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
2185 && TREE_THIS_VOLATILE (exp))
2187 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
2188 ;
2189 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
2190 copy_to_reg (last_expr_value);
2191 else
2193 rtx lab = gen_label_rtx ();
2195 /* Compare the value with itself to reference it. */
2196 emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
2197 expand_expr (TYPE_SIZE (last_expr_type),
2198 NULL_RTX, VOIDmode, 0),
2199 BLKmode, 0,
2200 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
2201 lab);
2202 emit_label (lab);
2206 /* If this expression is part of a ({...}) and is in memory, we may have
2207 to preserve temporaries. */
2208 preserve_temp_slots (last_expr_value);
2210 /* Free any temporaries used to evaluate this expression. Any temporary
2211 used as a result of this expression will already have been preserved
2212 above. */
2213 free_temp_slots ();
2215 emit_queue ();
2218 /* Warn if EXP contains any computations whose results are not used.
2219 Return 1 if a warning is printed; 0 otherwise. */
2221 int
2222 warn_if_unused_value (exp)
2223 tree exp;
2225 if (TREE_USED (exp))
2226 return 0;
2228 /* Don't warn about void constructs. This includes casting to void,
2229 void function calls, and statement expressions with a final cast
2230 to void. */
2231 if (VOID_TYPE_P (TREE_TYPE (exp)))
2232 return 0;
2234 /* If this is an expression with side effects, don't warn. */
2235 if (TREE_SIDE_EFFECTS (exp))
2236 return 0;
2238 switch (TREE_CODE (exp))
2240 case PREINCREMENT_EXPR:
2241 case POSTINCREMENT_EXPR:
2242 case PREDECREMENT_EXPR:
2243 case POSTDECREMENT_EXPR:
2244 case MODIFY_EXPR:
2245 case INIT_EXPR:
2246 case TARGET_EXPR:
2247 case CALL_EXPR:
2248 case METHOD_CALL_EXPR:
2249 case RTL_EXPR:
2250 case TRY_CATCH_EXPR:
2251 case WITH_CLEANUP_EXPR:
2252 case EXIT_EXPR:
2253 return 0;
2255 case BIND_EXPR:
2256 /* For a binding, warn if no side effect within it. */
2257 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2259 case SAVE_EXPR:
2260 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2262 case TRUTH_ORIF_EXPR:
2263 case TRUTH_ANDIF_EXPR:
2264 /* In && or ||, warn if 2nd operand has no side effect. */
2265 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2267 case COMPOUND_EXPR:
2268 if (TREE_NO_UNUSED_WARNING (exp))
2269 return 0;
2270 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2271 return 1;
2272 /* Let people do `(foo (), 0)' without a warning. */
2273 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2274 return 0;
2275 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2277 case NOP_EXPR:
2278 case CONVERT_EXPR:
2279 case NON_LVALUE_EXPR:
2280 /* Don't warn about conversions not explicit in the user's program. */
2281 if (TREE_NO_UNUSED_WARNING (exp))
2282 return 0;
2283 /* Assignment to a cast usually results in a cast of a modify.
2284 Don't complain about that. There can be an arbitrary number of
2285 casts before the modify, so we must loop until we find the first
2286 non-cast expression and then test to see if that is a modify. */
2288 tree tem = TREE_OPERAND (exp, 0);
2290 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2291 tem = TREE_OPERAND (tem, 0);
2293 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2294 || TREE_CODE (tem) == CALL_EXPR)
2295 return 0;
2297 goto warn;
2299 case INDIRECT_REF:
2300 /* Don't warn about automatic dereferencing of references, since
2301 the user cannot control it. */
2302 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2303 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2304 /* Fall through. */
2306 default:
2307 /* Referencing a volatile value is a side effect, so don't warn. */
2308 if ((DECL_P (exp)
2309 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2310 && TREE_THIS_VOLATILE (exp))
2311 return 0;
2313 /* If this is an expression which has no operands, there is no value
2314 to be unused. There are no such language-independent codes,
2315 but front ends may define such. */
2316 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2317 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2318 return 0;
2320 warn:
2321 warning_with_file_and_line (emit_filename, emit_lineno,
2322 "value computed is not used");
2323 return 1;
2327 /* Clear out the memory of the last expression evaluated. */
2329 void
2330 clear_last_expr ()
2332 last_expr_type = 0;
2335 /* Begin a statement which will return a value.
2336 Return the RTL_EXPR for this statement expr.
2337 The caller must save that value and pass it to expand_end_stmt_expr. */
2339 tree
2340 expand_start_stmt_expr ()
2342 tree t;
2344 /* Make the RTL_EXPR node temporary, not momentary,
2345 so that rtl_expr_chain doesn't become garbage. */
2346 t = make_node (RTL_EXPR);
2347 do_pending_stack_adjust ();
2348 start_sequence_for_rtl_expr (t);
2349 NO_DEFER_POP;
2350 expr_stmts_for_value++;
2351 return t;
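/* So the expected calling sequence, in sketch form, is:

   tree t = expand_start_stmt_expr ();
   ... expand the statements of the ({...}) ...
   t = expand_end_stmt_expr (t);

   leaving T as an RTL_EXPR that carries both the value and the insns
   that compute it.  */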
2354 /* Restore the previous state at the end of a statement that returns a value.
2355 Returns a tree node representing the statement's value and the
2356 insns to compute the value.
2358 The nodes of that expression have been freed by now, so we cannot use them.
2359 But we don't want to do that anyway; the expression has already been
2360 evaluated and now we just want to use the value. So generate a RTL_EXPR
2361 with the proper type and RTL value.
2363 If the last substatement was not an expression,
2364 return something with type `void'. */
2366 tree
2367 expand_end_stmt_expr (t)
2368 tree t;
2370 OK_DEFER_POP;
2372 if (last_expr_type == 0)
2374 last_expr_type = void_type_node;
2375 last_expr_value = const0_rtx;
2377 else if (last_expr_value == 0)
2378 /* There are some cases where this can happen, such as when the
2379 statement is void type. */
2380 last_expr_value = const0_rtx;
2381 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2382 /* Remove any possible QUEUED. */
2383 last_expr_value = protect_from_queue (last_expr_value, 0);
2385 emit_queue ();
2387 TREE_TYPE (t) = last_expr_type;
2388 RTL_EXPR_RTL (t) = last_expr_value;
2389 RTL_EXPR_SEQUENCE (t) = get_insns ();
2391 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2393 end_sequence ();
2395 /* Don't consider deleting this expr or containing exprs at tree level. */
2396 TREE_SIDE_EFFECTS (t) = 1;
2397 /* Propagate volatility of the actual RTL expr. */
2398 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2400 last_expr_type = 0;
2401 expr_stmts_for_value--;
2403 return t;
2406 /* Generate RTL for the start of an if-then. COND is the expression
2407 whose truth should be tested.
2409 If EXITFLAG is nonzero, this conditional is visible to
2410 `exit_something'. */
2412 void
2413 expand_start_cond (cond, exitflag)
2414 tree cond;
2415 int exitflag;
2417 struct nesting *thiscond = ALLOC_NESTING ();
2419 /* Make an entry on cond_stack for the cond we are entering. */
2421 thiscond->next = cond_stack;
2422 thiscond->all = nesting_stack;
2423 thiscond->depth = ++nesting_depth;
2424 thiscond->data.cond.next_label = gen_label_rtx ();
2425 /* Before we encounter an `else', we don't need a separate exit label
2426 unless there are supposed to be exit statements
2427 to exit this conditional. */
2428 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2429 thiscond->data.cond.endif_label = thiscond->exit_label;
2430 cond_stack = thiscond;
2431 nesting_stack = thiscond;
2433 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
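/* Thus a front end expanding `if (a) s1; else if (b) s2; else s3;'
   would call, in sketch form:

   expand_start_cond (a, 0);
   ... expand s1 ...
   expand_start_elseif (b);
   ... expand s2 ...
   expand_start_else ();
   ... expand s3 ...
   expand_end_cond ();  */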
2436 /* Generate RTL between then-clause and the elseif-clause
2437 of an if-then-elseif-.... */
2439 void
2440 expand_start_elseif (cond)
2441 tree cond;
2443 if (cond_stack->data.cond.endif_label == 0)
2444 cond_stack->data.cond.endif_label = gen_label_rtx ();
2445 emit_jump (cond_stack->data.cond.endif_label);
2446 emit_label (cond_stack->data.cond.next_label);
2447 cond_stack->data.cond.next_label = gen_label_rtx ();
2448 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2451 /* Generate RTL between the then-clause and the else-clause
2452 of an if-then-else. */
2454 void
2455 expand_start_else ()
2457 if (cond_stack->data.cond.endif_label == 0)
2458 cond_stack->data.cond.endif_label = gen_label_rtx ();
2460 emit_jump (cond_stack->data.cond.endif_label);
2461 emit_label (cond_stack->data.cond.next_label);
2462 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2465 /* After calling expand_start_else, turn this "else" into an "else if"
2466 by providing another condition. */
2468 void
2469 expand_elseif (cond)
2470 tree cond;
2472 cond_stack->data.cond.next_label = gen_label_rtx ();
2473 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2476 /* Generate RTL for the end of an if-then.
2477 Pop the record for it off of cond_stack. */
2479 void
2480 expand_end_cond ()
2482 struct nesting *thiscond = cond_stack;
2484 do_pending_stack_adjust ();
2485 if (thiscond->data.cond.next_label)
2486 emit_label (thiscond->data.cond.next_label);
2487 if (thiscond->data.cond.endif_label)
2488 emit_label (thiscond->data.cond.endif_label);
2490 POPSTACK (cond_stack);
2491 last_expr_type = 0;
2494 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2495 loop should be exited by `exit_something'. This is a loop for which
2496 `expand_continue' will jump to the top of the loop.
2498 Make an entry on loop_stack to record the labels associated with
2499 this loop. */
2501 struct nesting *
2502 expand_start_loop (exit_flag)
2503 int exit_flag;
2505 struct nesting *thisloop = ALLOC_NESTING ();
2507 /* Make an entry on loop_stack for the loop we are entering. */
2509 thisloop->next = loop_stack;
2510 thisloop->all = nesting_stack;
2511 thisloop->depth = ++nesting_depth;
2512 thisloop->data.loop.start_label = gen_label_rtx ();
2513 thisloop->data.loop.end_label = gen_label_rtx ();
2514 thisloop->data.loop.alt_end_label = 0;
2515 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2516 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2517 loop_stack = thisloop;
2518 nesting_stack = thisloop;
2520 do_pending_stack_adjust ();
2521 emit_queue ();
2522 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2523 emit_label (thisloop->data.loop.start_label);
2525 return thisloop;
2528 /* Like expand_start_loop but for a loop where the continuation point
2529 (for expand_continue_loop) will be specified explicitly. */
2531 struct nesting *
2532 expand_start_loop_continue_elsewhere (exit_flag)
2533 int exit_flag;
2535 struct nesting *thisloop = expand_start_loop (exit_flag);
2536 loop_stack->data.loop.continue_label = gen_label_rtx ();
2537 return thisloop;
2540 /* Begin a null, aka do { } while (0) "loop". But since the contents
2541 of said loop can still contain a break, we must frob the loop nest. */
2543 struct nesting *
2544 expand_start_null_loop ()
2546 struct nesting *thisloop = ALLOC_NESTING ();
2548 /* Make an entry on loop_stack for the loop we are entering. */
2550 thisloop->next = loop_stack;
2551 thisloop->all = nesting_stack;
2552 thisloop->depth = ++nesting_depth;
2553 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2554 thisloop->data.loop.end_label = gen_label_rtx ();
2555 thisloop->data.loop.alt_end_label = NULL_RTX;
2556 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2557 thisloop->exit_label = thisloop->data.loop.end_label;
2558 loop_stack = thisloop;
2559 nesting_stack = thisloop;
2561 return thisloop;
2564 /* Specify the continuation point for a loop started with
2565 expand_start_loop_continue_elsewhere.
2566 Use this at the point in the code to which a continue statement
2567 should jump. */
2569 void
2570 expand_loop_continue_here ()
2572 do_pending_stack_adjust ();
2573 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2574 emit_label (loop_stack->data.loop.continue_label);
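/* Putting the loop entry points together, a front end expanding
   `for (init; cond; incr) body' might emit, in sketch form:

   ... expand init ...
   expand_start_loop_continue_elsewhere (1);
   expand_exit_loop_if_false (0, cond);
   ... expand body ...
   expand_loop_continue_here ();
   ... expand incr ...
   expand_end_loop ();  */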
2577 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2578 Pop the block off of loop_stack. */
2580 void
2581 expand_end_loop ()
2583 rtx start_label = loop_stack->data.loop.start_label;
2584 rtx insn = get_last_insn ();
2585 int needs_end_jump = 1;
2587 /* Mark the continue-point at the top of the loop if none elsewhere. */
2588 if (start_label == loop_stack->data.loop.continue_label)
2589 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2591 do_pending_stack_adjust ();
2593 /* If optimizing, perhaps reorder the loop.
2594 First, try to use a condjump near the end.
2595 expand_exit_loop_if_false ends loops with unconditional jumps,
2596 like this:
2598 if (test) goto label;
2599 optional: cleanup
2600 goto loop_stack->data.loop.end_label
2601 barrier
2602 label:
2604 If we find such a pattern, we can end the loop earlier. */
2606 if (optimize
2607 && GET_CODE (insn) == CODE_LABEL
2608 && LABEL_NAME (insn) == NULL
2609 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2611 rtx label = insn;
2612 rtx jump = PREV_INSN (PREV_INSN (label));
2614 if (GET_CODE (jump) == JUMP_INSN
2615 && GET_CODE (PATTERN (jump)) == SET
2616 && SET_DEST (PATTERN (jump)) == pc_rtx
2617 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2618 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2619 == loop_stack->data.loop.end_label))
2621 rtx prev;
2623 /* The test might be complex and reference LABEL multiple times,
2624 like the loop in loop_iterations to set vtop. To handle this,
2625 we move LABEL. */
2626 insn = PREV_INSN (label);
2627 reorder_insns (label, label, start_label);
2629 for (prev = PREV_INSN (jump);; prev = PREV_INSN (prev))
2631 /* We ignore line number notes, but if we see any other note,
2632 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2633 NOTE_INSN_LOOP_*, we disable this optimization. */
2634 if (GET_CODE (prev) == NOTE)
2636 if (NOTE_LINE_NUMBER (prev) < 0)
2637 break;
2638 continue;
2640 if (GET_CODE (prev) == CODE_LABEL)
2641 break;
2642 if (GET_CODE (prev) == JUMP_INSN)
2644 if (GET_CODE (PATTERN (prev)) == SET
2645 && SET_DEST (PATTERN (prev)) == pc_rtx
2646 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2647 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2648 == LABEL_REF)
2649 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2651 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2652 = start_label;
2653 emit_note_after (NOTE_INSN_LOOP_END, prev);
2654 needs_end_jump = 0;
2656 break;
2662 /* If the loop starts with a loop exit, roll that to the end where
2663 it will optimize together with the jump back.
2665 We look for the conditional branch to the exit, except that once
2666 we find such a branch, we don't look past 30 instructions.
2668 In more detail, if the loop presently looks like this (in pseudo-C):
2670 start_label:
2671 if (test) goto end_label;
2672 body;
2673 goto start_label;
2674 end_label:
2676 transform it to look like:
2678 goto start_label;
2679 newstart_label:
2680 body;
2681 start_label:
2682 if (test) goto end_label;
2683 goto newstart_label;
2684 end_label:
2686 Here, the `test' may actually consist of some reasonably complex
2687 code, terminating in a test. */
2689 if (optimize
2690 && needs_end_jump
2691 &&
2692 ! (GET_CODE (insn) == JUMP_INSN
2693 && GET_CODE (PATTERN (insn)) == SET
2694 && SET_DEST (PATTERN (insn)) == pc_rtx
2695 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2697 int eh_regions = 0;
2698 int num_insns = 0;
2699 rtx last_test_insn = NULL_RTX;
2701 /* Scan insns from the top of the loop looking for a qualified
2702 conditional exit. */
2703 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2704 insn = NEXT_INSN (insn))
2706 if (GET_CODE (insn) == NOTE)
2708 if (optimize < 2
2709 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2710 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2711 /* The code that actually moves the exit test will
2712 carefully leave BLOCK notes in their original
2713 location. That means, however, that we can't debug
2714 the exit test itself. So, we refuse to move code
2715 containing BLOCK notes at low optimization levels. */
2716 break;
2718 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2719 ++eh_regions;
2720 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2722 --eh_regions;
2723 if (eh_regions < 0)
2724 /* We've come to the end of an EH region, but
2725 never saw the beginning of that region. That
2726 means that an EH region begins before the top
2727 of the loop, and ends in the middle of it. The
2728 existence of such a situation violates a basic
2729 assumption in this code, since that would imply
2730 that even when EH_REGIONS is zero, we might
2731 move code out of an exception region. */
2732 abort ();
2735 /* We must not walk into a nested loop. */
2736 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2737 break;
2739 /* We already know this INSN is a NOTE, so there's no
2740 point in looking at it to see if it's a JUMP. */
2741 continue;
2744 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2745 num_insns++;
2747 if (last_test_insn && num_insns > 30)
2748 break;
2750 if (eh_regions > 0)
2751 /* We don't want to move a partial EH region. Consider:
2753 while ( ( { try {
2754 if (cond ()) 0;
2755 else {
2756 bar();
2757 }
2759 } catch (...) {
2761 } )) {
2762 body;
2763 }
2765 This isn't legal C++, but here's what it's supposed to
2766 mean: if cond() is true, stop looping. Otherwise,
2767 call bar, and keep looping. In addition, if cond
2768 throws an exception, catch it and keep looping. Such
2769 constructs are certainly legal in LISP.
2771 We should not move the `if (cond()) 0' test since then
2772 the EH-region for the try-block would be broken up.
2773 (In this case we would move the EH_BEG note for the `try'
2774 and `if cond()' but not the call to bar() or the
2775 EH_END note.)
2777 So we don't look for tests within an EH region. */
2778 continue;
2780 if (GET_CODE (insn) == JUMP_INSN
2781 && GET_CODE (PATTERN (insn)) == SET
2782 && SET_DEST (PATTERN (insn)) == pc_rtx)
2784 /* This is indeed a jump. */
2785 rtx dest1 = NULL_RTX;
2786 rtx dest2 = NULL_RTX;
2787 rtx potential_last_test;
2788 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2790 /* A conditional jump. */
2791 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2792 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2793 potential_last_test = insn;
2795 else
2797 /* An unconditional jump. */
2798 dest1 = SET_SRC (PATTERN (insn));
2799 /* Include the BARRIER after the JUMP. */
2800 potential_last_test = NEXT_INSN (insn);
2803 do {
2804 if (dest1 && GET_CODE (dest1) == LABEL_REF
2805 && ((XEXP (dest1, 0)
2806 == loop_stack->data.loop.alt_end_label)
2807 || (XEXP (dest1, 0)
2808 == loop_stack->data.loop.end_label)))
2810 last_test_insn = potential_last_test;
2811 break;
2814 /* If this was a conditional jump, there may be
2815 another label at which we should look. */
2816 dest1 = dest2;
2817 dest2 = NULL_RTX;
2818 } while (dest1);
2822 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2824 /* We found one. Move everything from there up
2825 to the end of the loop, and add a jump into the loop
2826 to jump to there. */
2827 rtx newstart_label = gen_label_rtx ();
2828 rtx start_move = start_label;
2829 rtx next_insn;
2831 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2832 then we want to move this note also. */
2833 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2834 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2835 == NOTE_INSN_LOOP_CONT))
2836 start_move = PREV_INSN (start_move);
2838 emit_label_after (newstart_label, PREV_INSN (start_move));
2840 /* Actually move the insns. Start at the beginning, and
2841 keep copying insns until we've copied the
2842 last_test_insn. */
2843 for (insn = start_move; insn; insn = next_insn)
2845 /* Figure out which insn comes after this one. We have
2846 to do this before we move INSN. */
2847 if (insn == last_test_insn)
2848 /* We've moved all the insns. */
2849 next_insn = NULL_RTX;
2850 else
2851 next_insn = NEXT_INSN (insn);
2853 if (GET_CODE (insn) == NOTE
2854 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2855 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2856 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2857 NOTE_INSN_BLOCK_ENDs because the correct generation
2858 of debugging information depends on these appearing
2859 in the same order in the RTL and in the tree
2860 structure, where they are represented as BLOCKs.
2861 So, we don't move block notes. Of course, moving
2862 the code inside the block is likely to make it
2863 impossible to debug the instructions in the exit
2864 test, but such is the price of optimization. */
2865 continue;
2867 /* Move the INSN. */
2868 reorder_insns (insn, insn, get_last_insn ());
2871 emit_jump_insn_after (gen_jump (start_label),
2872 PREV_INSN (newstart_label));
2873 emit_barrier_after (PREV_INSN (newstart_label));
2874 start_label = newstart_label;
2878 if (needs_end_jump)
2880 emit_jump (start_label);
2881 emit_note (NULL, NOTE_INSN_LOOP_END);
2883 emit_label (loop_stack->data.loop.end_label);
2885 POPSTACK (loop_stack);
2887 last_expr_type = 0;
2890 /* Finish a null loop, aka do { } while (0). */
2892 void
2893 expand_end_null_loop ()
2895 do_pending_stack_adjust ();
2896 emit_label (loop_stack->data.loop.end_label);
2898 POPSTACK (loop_stack);
2900 last_expr_type = 0;
2903 /* Generate a jump to the current loop's continue-point.
2904 This is usually the top of the loop, but may be specified
2905 explicitly elsewhere. If not currently inside a loop,
2906 return 0 and do nothing; caller will print an error message. */
2908 int
2909 expand_continue_loop (whichloop)
2910 struct nesting *whichloop;
2912 last_expr_type = 0;
2913 if (whichloop == 0)
2914 whichloop = loop_stack;
2915 if (whichloop == 0)
2916 return 0;
2917 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2918 NULL_RTX);
2919 return 1;
2922 /* Generate a jump to exit the current loop. If not currently inside a loop,
2923 return 0 and do nothing; caller will print an error message. */
2925 int
2926 expand_exit_loop (whichloop)
2927 struct nesting *whichloop;
2929 last_expr_type = 0;
2930 if (whichloop == 0)
2931 whichloop = loop_stack;
2932 if (whichloop == 0)
2933 return 0;
2934 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2935 return 1;
2938 /* Generate a conditional jump to exit the current loop if COND
2939 evaluates to zero. If not currently inside a loop,
2940 return 0 and do nothing; caller will print an error message. */
2942 int
2943 expand_exit_loop_if_false (whichloop, cond)
2944 struct nesting *whichloop;
2945 tree cond;
2947 rtx label = gen_label_rtx ();
2948 rtx last_insn;
2949 last_expr_type = 0;
2951 if (whichloop == 0)
2952 whichloop = loop_stack;
2953 if (whichloop == 0)
2954 return 0;
2955 /* In order to handle fixups, we actually create a conditional jump
2956 around an unconditional branch to exit the loop. If fixups are
2957 necessary, they go before the unconditional branch. */
2959 do_jump (cond, NULL_RTX, label);
2960 last_insn = get_last_insn ();
2961 if (GET_CODE (last_insn) == CODE_LABEL)
2962 whichloop->data.loop.alt_end_label = last_insn;
2963 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2964 NULL_RTX);
2965 emit_label (label);
2967 return 1;
2970 /* Return nonzero if the loop nest is empty. Else return zero. */
2972 int
2973 stmt_loop_nest_empty ()
2975 /* cfun->stmt can be NULL if we are building a call to get the
2976 EH context for a setjmp/longjmp EH target and the current
2977 function was a deferred inline function. */
2978 return (cfun->stmt == NULL || loop_stack == NULL);
2981 /* Return non-zero if we should preserve sub-expressions as separate
2982 pseudos. We never do so if we aren't optimizing. We always do so
2983 if -fexpensive-optimizations.
2985 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2986 the loop may still be a small one. */
2988 int
2989 preserve_subexpressions_p ()
2991 rtx insn;
2993 if (flag_expensive_optimizations)
2994 return 1;
2996 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2997 return 0;
2999 insn = get_last_insn_anywhere ();
3001 return (insn
3002 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
3003 < n_non_fixed_regs * 3));
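/* E.g. (purely illustrative numbers): with n_non_fixed_regs == 30,
   subexpressions are preserved as separate pseudos only while we are
   within roughly the first 90 insn UIDs after the loop's start
   label.  */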
3007 /* Generate a jump to exit the current loop, conditional, binding contour
3008 or case statement. Not all such constructs are visible to this function,
3009 only those started with EXIT_FLAG nonzero. Individual languages use
3010 the EXIT_FLAG parameter to control which kinds of constructs you can
3011 exit this way.
3013 If not currently inside anything that can be exited,
3014 return 0 and do nothing; caller will print an error message. */
3016 int
3017 expand_exit_something ()
3019 struct nesting *n;
3020 last_expr_type = 0;
3021 for (n = nesting_stack; n; n = n->all)
3022 if (n->exit_label != 0)
3024 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
3025 return 1;
3028 return 0;
3031 /* Generate RTL to return from the current function, with no value.
3032 (That is, we do not do anything about returning any value.) */
3034 void
3035 expand_null_return ()
3037 rtx last_insn = get_last_insn ();
3039 /* If this function was declared to return a value, but we
3040 didn't return one, clobber the return registers so that they are not
3041 propagated live to the rest of the function. */
3042 clobber_return_register ();
3044 expand_null_return_1 (last_insn);
3047 /* Generate RTL to return from the current function, with value VAL. */
3049 static void
3050 expand_value_return (val)
3051 rtx val;
3053 rtx last_insn = get_last_insn ();
3054 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
3056 /* Copy the value to the return location
3057 unless it's already there. */
3059 if (return_reg != val)
3061 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
3062 #ifdef PROMOTE_FUNCTION_RETURN
3063 int unsignedp = TREE_UNSIGNED (type);
3064 enum machine_mode old_mode
3065 = DECL_MODE (DECL_RESULT (current_function_decl));
3066 enum machine_mode mode
3067 = promote_mode (type, old_mode, &unsignedp, 1);
3069 if (mode != old_mode)
3070 val = convert_modes (mode, old_mode, val, unsignedp);
3071 #endif
3072 if (GET_CODE (return_reg) == PARALLEL)
3073 emit_group_load (return_reg, val, int_size_in_bytes (type),
3074 TYPE_ALIGN (type));
3075 else
3076 emit_move_insn (return_reg, val);
3079 expand_null_return_1 (last_insn);
3082 /* Output a return with no value. If LAST_INSN is nonzero,
3083 pretend that the return takes place after LAST_INSN. */
3085 static void
3086 expand_null_return_1 (last_insn)
3087 rtx last_insn;
3089 rtx end_label = cleanup_label ? cleanup_label : return_label;
3091 clear_pending_stack_adjust ();
3092 do_pending_stack_adjust ();
3093 last_expr_type = 0;
3095 if (end_label == 0)
3096 end_label = return_label = gen_label_rtx ();
3097 expand_goto_internal (NULL_TREE, end_label, last_insn);
3100 /* Generate RTL to evaluate the expression RETVAL and return it
3101 from the current function. */
3103 void
3104 expand_return (retval)
3105 tree retval;
3107 /* If there are any cleanups to be performed, then they will
3108 be inserted following LAST_INSN. It is desirable
3109 that the last_insn, for such purposes, should be the
3110 last insn before computing the return value. Otherwise, cleanups
3111 which call functions can clobber the return value. */
3112 /* ??? rms: I think that is erroneous, because in C++ it would
3113 run destructors on variables that might be used in the subsequent
3114 computation of the return value. */
3115 rtx last_insn = 0;
3116 rtx result_rtl;
3117 rtx val = 0;
3118 tree retval_rhs;
3120 /* If function wants no value, give it none. */
3121 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3123 expand_expr (retval, NULL_RTX, VOIDmode, 0);
3124 emit_queue ();
3125 expand_null_return ();
3126 return;
3129 if (retval == error_mark_node)
3131 /* Treat this like a return of no value from a function that
3132 returns a value. */
3133 expand_null_return ();
3134 return;
3136 else if (TREE_CODE (retval) == RESULT_DECL)
3137 retval_rhs = retval;
3138 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3139 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3140 retval_rhs = TREE_OPERAND (retval, 1);
3141 else if (VOID_TYPE_P (TREE_TYPE (retval)))
3142 /* Recognize tail-recursive call to void function. */
3143 retval_rhs = retval;
3144 else
3145 retval_rhs = NULL_TREE;
3147 last_insn = get_last_insn ();
3149 /* Distribute return down conditional expr if either of the sides
3150 may involve tail recursion (see test below). This enhances the number
3151 of tail recursions we see. Don't do this always since it can produce
3152 sub-optimal code in some cases and we distribute assignments into
3153 conditional expressions when it would help. */
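/* That is (sketch): `return cond ? foo (x) : bar (y);' is expanded as
   if it were `if (cond) return foo (x); else return bar (y);', so that
   each call is seen in tail position.  */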
3155 if (optimize && retval_rhs != 0
3156 && frame_offset == 0
3157 && TREE_CODE (retval_rhs) == COND_EXPR
3158 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3159 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3161 rtx label = gen_label_rtx ();
3162 tree expr;
3164 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3165 start_cleanup_deferral ();
3166 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3167 DECL_RESULT (current_function_decl),
3168 TREE_OPERAND (retval_rhs, 1));
3169 TREE_SIDE_EFFECTS (expr) = 1;
3170 expand_return (expr);
3171 emit_label (label);
3173 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3174 DECL_RESULT (current_function_decl),
3175 TREE_OPERAND (retval_rhs, 2));
3176 TREE_SIDE_EFFECTS (expr) = 1;
3177 expand_return (expr);
3178 end_cleanup_deferral ();
3179 return;
3182 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3184 /* If the result is an aggregate that is being returned in one (or more)
3185 registers, load the registers here. The compiler currently can't handle
3186 copying a BLKmode value into registers. We could put this code in a
3187 more general area (for use by everyone instead of just function
3188 call/return), but until this feature is generally usable it is kept here
3189 (and in expand_call). The value must go into a pseudo in case there
3190 are cleanups that will clobber the real return register. */
3192 if (retval_rhs != 0
3193 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3194 && GET_CODE (result_rtl) == REG)
3196 int i;
3197 unsigned HOST_WIDE_INT bitpos, xbitpos;
3198 unsigned HOST_WIDE_INT big_endian_correction = 0;
3199 unsigned HOST_WIDE_INT bytes
3200 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3201 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3202 unsigned int bitsize
3203 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3204 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3205 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3206 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3207 enum machine_mode tmpmode, result_reg_mode;
3209 if (bytes == 0)
3211 expand_null_return ();
3212 return;
3215 /* Structures whose size is not a multiple of a word are aligned
3216 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3217 machine, this means we must skip the empty high order bytes when
3218 calculating the bit offset. */
3219 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
3220 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3221 * BITS_PER_UNIT));
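/* With illustrative numbers: for a 6-byte structure on a 32-bit
   BYTES_BIG_ENDIAN target, bytes % UNITS_PER_WORD == 2, so
   big_endian_correction == 32 - 2 * 8 == 16, and the first 16 bits
   (the empty high-order part) of the first destination word are
   skipped.  */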
3223 /* Copy the structure BITSIZE bits at a time. */
3224 for (bitpos = 0, xbitpos = big_endian_correction;
3225 bitpos < bytes * BITS_PER_UNIT;
3226 bitpos += bitsize, xbitpos += bitsize)
3228 /* We need a new destination pseudo each time xbitpos is
3229 on a word boundary and when xbitpos == big_endian_correction
3230 (the first time through). */
3231 if (xbitpos % BITS_PER_WORD == 0
3232 || xbitpos == big_endian_correction)
3234 /* Generate an appropriate register. */
3235 dst = gen_reg_rtx (word_mode);
3236 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3238 /* Clobber the destination before we move anything into it. */
3239 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
3242 /* We need a new source operand each time bitpos is on a word
3243 boundary. */
3244 if (bitpos % BITS_PER_WORD == 0)
3245 src = operand_subword_force (result_val,
3246 bitpos / BITS_PER_WORD,
3247 BLKmode);
3249 /* Use bitpos for the source extraction (left justified) and
3250 xbitpos for the destination store (right justified). */
3251 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3252 extract_bit_field (src, bitsize,
3253 bitpos % BITS_PER_WORD, 1,
3254 NULL_RTX, word_mode, word_mode,
3255 bitsize, BITS_PER_WORD),
3256 bitsize, BITS_PER_WORD);
3259 /* Find the smallest integer mode large enough to hold the
3260 entire structure and use that mode instead of BLKmode
3261 on the USE insn for the return register. */
3262 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3263 tmpmode != VOIDmode;
3264 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3265 /* Have we found a large enough mode? */
3266 if (GET_MODE_SIZE (tmpmode) >= bytes)
3267 break;
3269 /* No suitable mode found. */
3270 if (tmpmode == VOIDmode)
3271 abort ();
3273 PUT_MODE (result_rtl, tmpmode);
3275 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3276 result_reg_mode = word_mode;
3277 else
3278 result_reg_mode = tmpmode;
3279 result_reg = gen_reg_rtx (result_reg_mode);
3281 emit_queue ();
3282 for (i = 0; i < n_regs; i++)
3283 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3284 result_pseudos[i]);
3286 if (tmpmode != result_reg_mode)
3287 result_reg = gen_lowpart (tmpmode, result_reg);
3289 expand_value_return (result_reg);
3291 else if (retval_rhs != 0
3292 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3293 && (GET_CODE (result_rtl) == REG
3294 || (GET_CODE (result_rtl) == PARALLEL)))
3296 /* Calculate the return value into a temporary (usually a pseudo
3297 reg). */
3298 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3299 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3301 val = assign_temp (nt, 0, 0, 1);
3302 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3303 val = force_not_mem (val);
3304 emit_queue ();
3305 /* Return the calculated value, doing cleanups first. */
3306 expand_value_return (val);
3308 else
3310 /* No cleanups or no hard reg used;
3311 calculate value into hard return reg. */
3312 expand_expr (retval, const0_rtx, VOIDmode, 0);
3313 emit_queue ();
3314 expand_value_return (result_rtl);
3318 /* Return 1 if the end of the generated RTX is not a barrier.
3319 This means code already compiled can drop through. */
3321 int
3322 drop_through_at_end_p ()
3324 rtx insn = get_last_insn ();
3325 while (insn && GET_CODE (insn) == NOTE)
3326 insn = PREV_INSN (insn);
3327 return insn && GET_CODE (insn) != BARRIER;
3330 /* Attempt to optimize a potential tail recursion call into a goto.
3331 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3332 where to place the jump to the tail recursion label.
3334 Return TRUE if the call was optimized into a goto. */
3336 int
3337 optimize_tail_recursion (arguments, last_insn)
3338 tree arguments;
3339 rtx last_insn;
3341 /* Finish checking validity, and if valid emit code to set the
3342 argument variables for the new call. */
3343 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3345 if (tail_recursion_label == 0)
3347 tail_recursion_label = gen_label_rtx ();
3348 emit_label_after (tail_recursion_label,
3349 tail_recursion_reentry);
3351 emit_queue ();
3352 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3353 emit_barrier ();
3354 return 1;
3356 return 0;
3359 /* Emit code to alter this function's formal parms for a tail-recursive call.
3360 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3361 FORMALS is the chain of decls of formals.
3362 Return 1 if this can be done;
3363 otherwise return 0 and do not emit any code. */
3365 static int
3366 tail_recursion_args (actuals, formals)
3367 tree actuals, formals;
3369 tree a = actuals, f = formals;
3370 int i;
3371 rtx *argvec;
3373 /* Check that number and types of actuals are compatible
3374 with the formals. This is not always true in valid C code.
3375 Also check that no formal needs to be addressable
3376 and that all formals are scalars. */
3378 /* Also count the args. */
3380 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3382 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3383 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3384 return 0;
3385 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3386 return 0;
3388 if (a != 0 || f != 0)
3389 return 0;
3391 /* Compute all the actuals. */
3393 argvec = (rtx *) alloca (i * sizeof (rtx));
3395 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3396 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3398 /* Find which actual values refer to current values of previous formals.
3399 Copy each of them now, before any formal is changed. */
3401 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3403 int copy = 0;
3404 int j;
3405 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3406 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3408 copy = 1;
3409 break;
3411 if (copy)
3412 argvec[i] = copy_to_reg (argvec[i]);
3415 /* Store the values of the actuals into the formals. */
3417 for (f = formals, a = actuals, i = 0; f;
3418 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3420 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3421 emit_move_insn (DECL_RTL (f), argvec[i]);
3422 else
3423 convert_move (DECL_RTL (f), argvec[i],
3424 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3427 free_temp_slots ();
3428 return 1;
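/* The net effect, sketched in pseudo-C: for a tail call such as
   `return f (n - 1, n * acc);', all the actual values are computed
   first (copying any that still refer to a formal about to change),
   the formals N and ACC are then overwritten with them, and
   optimize_tail_recursion emits the jump back to
   tail_recursion_label.  */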
3431 /* Generate the RTL code for entering a binding contour.
3432 The variables are declared one by one, by calls to `expand_decl'.
3434 FLAGS is a bitwise or of the following flags:
3436 1 - Nonzero if this construct should be visible to
3437 `exit_something'.
3439 2 - Nonzero if this contour does not require a
3440 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3441 language-independent code should set this flag because they
3442 will not create corresponding BLOCK nodes. (There should be
3443 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3444 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3445 when expand_end_bindings is called.
3447 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3448 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3449 note. */
3451 void
3452 expand_start_bindings_and_block (flags, block)
3453 int flags;
3454 tree block;
3456 struct nesting *thisblock = ALLOC_NESTING ();
3457 rtx note;
3458 int exit_flag = ((flags & 1) != 0);
3459 int block_flag = ((flags & 2) == 0);
3461 /* If a BLOCK is supplied, then the caller should be requesting a
3462 NOTE_INSN_BLOCK_BEG note. */
3463 if (!block_flag && block)
3464 abort ();
3466 /* Create a note to mark the beginning of the block. */
3467 if (block_flag)
3469 note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
3470 NOTE_BLOCK (note) = block;
3472 else
3473 note = emit_note (NULL, NOTE_INSN_DELETED);
3475 /* Make an entry on block_stack for the block we are entering. */
3477 thisblock->next = block_stack;
3478 thisblock->all = nesting_stack;
3479 thisblock->depth = ++nesting_depth;
3480 thisblock->data.block.stack_level = 0;
3481 thisblock->data.block.cleanups = 0;
3482 thisblock->data.block.n_function_calls = 0;
3483 thisblock->data.block.exception_region = 0;
3484 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3486 thisblock->data.block.conditional_code = 0;
3487 thisblock->data.block.last_unconditional_cleanup = note;
3488 /* When we insert instructions after the last unconditional cleanup,
3489 we don't adjust last_insn. That means that a later add_insn will
3490 clobber the instructions we've just added. The easiest way to
3491 fix this is to just insert another instruction here, so that the
3492 instructions inserted after the last unconditional cleanup are
3493 never the last instruction. */
3494 emit_note (NULL, NOTE_INSN_DELETED);
3495 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3497 if (block_stack
3498 && !(block_stack->data.block.cleanups == NULL_TREE
3499 && block_stack->data.block.outer_cleanups == NULL_TREE))
3500 thisblock->data.block.outer_cleanups
3501 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3502 block_stack->data.block.outer_cleanups);
3503 else
3504 thisblock->data.block.outer_cleanups = 0;
3505 thisblock->data.block.label_chain = 0;
3506 thisblock->data.block.innermost_stack_block = stack_block_stack;
3507 thisblock->data.block.first_insn = note;
3508 thisblock->data.block.block_start_count = ++current_block_start_count;
3509 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3510 block_stack = thisblock;
3511 nesting_stack = thisblock;
3513 /* Make a new level for allocating stack slots. */
3514 push_temp_slots ();
3517 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3518 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3519 expand_expr are made. After we end the region, we know that all
3520 space for all temporaries that were created by TARGET_EXPRs will be
3521 destroyed and their space freed for reuse. */
3523 void
3524 expand_start_target_temps ()
3526 /* This is so that even if the result is preserved, the space
3527 allocated will be freed, as we know that it is no longer in use. */
3528 push_temp_slots ();
3530 /* Start a new binding layer that will keep track of all cleanup
3531 actions to be performed. */
3532 expand_start_bindings (2);
3534 target_temp_slot_level = temp_slot_level;
3537 void
3538 expand_end_target_temps ()
3540 expand_end_bindings (NULL_TREE, 0, 0);
3542 /* This is so that even if the result is preserved, the space
3543 allocated will be freed, as we know that it is no longer in use. */
3544 pop_temp_slots ();
3547 /* Given a pointer to a BLOCK node, return non-zero if (and only if) the node
3548 in question represents the outermost pair of curly braces (i.e. the "body
3549 block") of a function or method.
3551 For any BLOCK node representing a "body block" of a function or method, the
3552 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3553 represents the outermost (function) scope for the function or method (i.e.
3554 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3555 *that* node in turn will point to the relevant FUNCTION_DECL node. */
3557 int
3558 is_body_block (stmt)
3559 tree stmt;
3561 if (TREE_CODE (stmt) == BLOCK)
3563 tree parent = BLOCK_SUPERCONTEXT (stmt);
3565 if (parent && TREE_CODE (parent) == BLOCK)
3567 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3569 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3570 return 1;
3574 return 0;
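/* For example (sketch): given `int f (int i) { ... }', the BLOCK for
   the function body satisfies this predicate: its BLOCK_SUPERCONTEXT
   is the BLOCK for the outermost scope (the one holding I), whose
   BLOCK_SUPERCONTEXT is in turn the FUNCTION_DECL for `f'.  */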
3577 /* True if we are currently emitting insns in an area of output code
3578 that is controlled by a conditional expression. This is used by
3579 the cleanup handling code to generate conditional cleanup actions. */
3581 int
3582 conditional_context ()
3584 return block_stack && block_stack->data.block.conditional_code;
3587 /* Return an opaque pointer to the current nesting level, so frontend code
3588 can check its own sanity. */
3590 struct nesting *
3591 current_nesting_level ()
3593 return cfun ? block_stack : 0;
3596 /* Emit a handler label for a nonlocal goto handler.
3597 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3599 static rtx
3600 expand_nl_handler_label (slot, before_insn)
3601 rtx slot, before_insn;
3603 rtx insns;
3604 rtx handler_label = gen_label_rtx ();
3606 /* Don't let cleanup_cfg delete the handler. */
3607 LABEL_PRESERVE_P (handler_label) = 1;
3609 start_sequence ();
3610 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3611 insns = get_insns ();
3612 end_sequence ();
3613 emit_insns_before (insns, before_insn);
3615 emit_label (handler_label);
3617 return handler_label;
3620 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3621 handler. */
3622 static void
3623 expand_nl_goto_receiver ()
3625 #ifdef HAVE_nonlocal_goto
3626 if (! HAVE_nonlocal_goto)
3627 #endif
3628 /* First adjust our frame pointer to its actual value. It was
3629 previously set to the start of the virtual area corresponding to
3630 the stacked variables when we branched here and now needs to be
3631 adjusted to the actual hardware fp value.
3633 Assignments to virtual registers are converted by
3634 instantiate_virtual_regs into the corresponding assignment
3635 to the underlying register (fp in this case) that makes
3636 the original assignment true.
3637 So the following insn will actually be
3638 decrementing fp by STARTING_FRAME_OFFSET. */
3639 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3641 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3642 if (fixed_regs[ARG_POINTER_REGNUM])
3644 #ifdef ELIMINABLE_REGS
3645 /* If the argument pointer can be eliminated in favor of the
3646 frame pointer, we don't need to restore it. We assume here
3647 that if such an elimination is present, it can always be used.
3648 This is the case on all known machines; if we don't make this
3649 assumption, we do unnecessary saving on many machines. */
3650 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3651 size_t i;
3653 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3654 if (elim_regs[i].from == ARG_POINTER_REGNUM
3655 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3656 break;
3658 if (i == ARRAY_SIZE (elim_regs))
3659 #endif
3661 /* Now restore our arg pointer from the address at which it
3662 was saved in our stack frame. */
3663 emit_move_insn (virtual_incoming_args_rtx,
3664 copy_to_reg (get_arg_pointer_save_area (cfun)));
3667 #endif
3669 #ifdef HAVE_nonlocal_goto_receiver
3670 if (HAVE_nonlocal_goto_receiver)
3671 emit_insn (gen_nonlocal_goto_receiver ());
3672 #endif
3675 /* Make handlers for nonlocal gotos taking place in the function calls in
3676 block THISBLOCK. */
3678 static void
3679 expand_nl_goto_receivers (thisblock)
3680 struct nesting *thisblock;
3682 tree link;
3683 rtx afterward = gen_label_rtx ();
3684 rtx insns, slot;
3685 rtx label_list;
3686 int any_invalid;
3688 /* Record the handler address in the stack slot for that purpose,
3689 during this block, saving and restoring the outer value. */
3690 if (thisblock->next != 0)
3691 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3693 rtx save_receiver = gen_reg_rtx (Pmode);
3694 emit_move_insn (XEXP (slot, 0), save_receiver);
3696 start_sequence ();
3697 emit_move_insn (save_receiver, XEXP (slot, 0));
3698 insns = get_insns ();
3699 end_sequence ();
3700 emit_insns_before (insns, thisblock->data.block.first_insn);
3703 /* Jump around the handlers; they run only when specially invoked. */
3704 emit_jump (afterward);
3706 /* Make a separate handler for each label. */
3707 link = nonlocal_labels;
3708 slot = nonlocal_goto_handler_slots;
3709 label_list = NULL_RTX;
3710 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3711 /* Skip any labels we shouldn't be able to jump to from here;
3712 we generate one special handler for all of them below, which just calls
3713 abort. */
3714 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3716 rtx lab;
3717 lab = expand_nl_handler_label (XEXP (slot, 0),
3718 thisblock->data.block.first_insn);
3719 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3721 expand_nl_goto_receiver ();
3723 /* Jump to the "real" nonlocal label. */
3724 expand_goto (TREE_VALUE (link));
3727 /* A second pass over all nonlocal labels; this time we handle those
3728 we should not be able to jump to at this point. */
3729 link = nonlocal_labels;
3730 slot = nonlocal_goto_handler_slots;
3731 any_invalid = 0;
3732 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3733 if (DECL_TOO_LATE (TREE_VALUE (link)))
3735 rtx lab;
3736 lab = expand_nl_handler_label (XEXP (slot, 0),
3737 thisblock->data.block.first_insn);
3738 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3739 any_invalid = 1;
3742 if (any_invalid)
3744 expand_nl_goto_receiver ();
3745 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3746 VOIDmode, 0);
3747 emit_barrier ();
3750 nonlocal_goto_handler_labels = label_list;
3751 emit_label (afterward);
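/* Editor's aside (not part of the original stmt.c): the GNU C
   construct these receivers serve -- a goto from a nested function to
   a label declared with `__label__' in the containing function.  Each
   such label gets a handler that fixes up the frame and argument
   pointers (expand_nl_goto_receiver above) before jumping to the real
   label.  Illustrative sketch, guarded out of the build.  */
#if 0
int
find (array, size, key)
     int *array;
     int size, key;
{
  __label__ found;              /* visible to the nested function */
  void check (int i)
    {
      if (array[i] == key)
        goto found;             /* nonlocal goto: enters the handler */
    }
  int i;

  for (i = 0; i < size; i++)
    check (i);
  return -1;

 found:
  return i;                     /* reached via the receiver code */
}
#endif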
3754 /* Warn about any unused VARS (which may contain nodes other than
3755 VAR_DECLs, but such nodes are ignored). The nodes are connected
3756 via the TREE_CHAIN field. */
3758 void
3759 warn_about_unused_variables (vars)
3760 tree vars;
3762 tree decl;
3764 if (warn_unused_variable)
3765 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3766 if (TREE_CODE (decl) == VAR_DECL
3767 && ! TREE_USED (decl)
3768 && ! DECL_IN_SYSTEM_HEADER (decl)
3769 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3770 warning_with_decl (decl, "unused variable `%s'");
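/* Editor's aside (not part of the original stmt.c): what the loop
   above reports under -Wunused-variable.  Unnamed, artificial, and
   system-header declarations are deliberately exempt.  Sketch only. */
#if 0
int
f ()
{
  int idle;                     /* warning: unused variable `idle' */
  int used = 1;                 /* the reference below sets TREE_USED */
  return used;
}
#endif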
3773 /* Generate RTL code to terminate a binding contour.
3775 VARS is the chain of VAR_DECL nodes for the variables bound in this
3776 contour. There may actually be other nodes in this chain, but any
3777 nodes other than VAR_DECLS are ignored.
3779 MARK_ENDS is nonzero if we should put a note at the beginning
3780 and end of this binding contour.
3782 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3783 (That is true automatically if the contour has a saved stack level.) */
3785 void
3786 expand_end_bindings (vars, mark_ends, dont_jump_in)
3787 tree vars;
3788 int mark_ends;
3789 int dont_jump_in;
3791 struct nesting *thisblock = block_stack;
3793 /* If any of the variables in this scope were not used, warn the
3794 user. */
3795 warn_about_unused_variables (vars);
3797 if (thisblock->exit_label)
3799 do_pending_stack_adjust ();
3800 emit_label (thisblock->exit_label);
3803 /* If necessary, make handlers for nonlocal gotos taking
3804 place in the function calls in this block. */
3805 if (function_call_count != thisblock->data.block.n_function_calls
3806 && nonlocal_labels
3807 /* Make handler for outermost block
3808 if there were any nonlocal gotos to this function. */
3809 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3810 /* Make handler for inner block if it has something
3811 special to do when you jump out of it. */
3812 : (thisblock->data.block.cleanups != 0
3813 || thisblock->data.block.stack_level != 0)))
3814 expand_nl_goto_receivers (thisblock);
3816 /* Don't allow jumping into a block that has a stack level.
3817 Cleanups are allowed, though. */
3818 if (dont_jump_in
3819 || thisblock->data.block.stack_level != 0)
3821 struct label_chain *chain;
3823 /* Any labels in this block are no longer valid to go to.
3824 Mark them to cause an error message. */
3825 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3827 DECL_TOO_LATE (chain->label) = 1;
3828 /* If any goto without a fixup came to this label,
3829 that must be an error, because gotos without fixups
3830 come from outside all saved stack-levels. */
3831 if (TREE_ADDRESSABLE (chain->label))
3832 error_with_decl (chain->label,
3833 "label `%s' used before containing binding contour");
3837 /* Restore stack level in effect before the block
3838 (only if variable-size objects allocated). */
3839 /* Perform any cleanups associated with the block. */
3841 if (thisblock->data.block.stack_level != 0
3842 || thisblock->data.block.cleanups != 0)
3844 int reachable;
3845 rtx insn;
3847 /* Don't let cleanups affect ({...}) constructs. */
3848 int old_expr_stmts_for_value = expr_stmts_for_value;
3849 rtx old_last_expr_value = last_expr_value;
3850 tree old_last_expr_type = last_expr_type;
3851 expr_stmts_for_value = 0;
3853 /* Only clean up here if this point can actually be reached. */
3854 insn = get_last_insn ();
3855 if (GET_CODE (insn) == NOTE)
3856 insn = prev_nonnote_insn (insn);
3857 reachable = (! insn || GET_CODE (insn) != BARRIER);
3859 /* Do the cleanups. */
3860 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3861 if (reachable)
3862 do_pending_stack_adjust ();
3864 expr_stmts_for_value = old_expr_stmts_for_value;
3865 last_expr_value = old_last_expr_value;
3866 last_expr_type = old_last_expr_type;
3868 /* Restore the stack level. */
3870 if (reachable && thisblock->data.block.stack_level != 0)
3872 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3873 thisblock->data.block.stack_level, NULL_RTX);
3874 if (nonlocal_goto_handler_slots != 0)
3875 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3876 NULL_RTX);
3879 /* Any gotos out of this block must also do these things.
3880 Also report any gotos with fixups that came to labels in this
3881 level. */
3882 fixup_gotos (thisblock,
3883 thisblock->data.block.stack_level,
3884 thisblock->data.block.cleanups,
3885 thisblock->data.block.first_insn,
3886 dont_jump_in);
3889 /* Mark the beginning and end of the scope if requested.
3890 We do this now, after running cleanups on the variables
3891 just going out of scope, so they are in scope for their cleanups. */
3893 if (mark_ends)
3895 rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
3896 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3898 else
3899 /* Get rid of the beginning-mark if we don't make an end-mark. */
3900 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3902 /* Restore the temporary level of TARGET_EXPRs. */
3903 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3905 /* Restore block_stack level for containing block. */
3907 stack_block_stack = thisblock->data.block.innermost_stack_block;
3908 POPSTACK (block_stack);
3910 /* Pop the stack slot nesting and free any slots at this level. */
3911 pop_temp_slots ();
3914 /* Generate code to save the stack pointer at the start of the current block
3915 and set up to restore it on exit. */
3917 void
3918 save_stack_pointer ()
3920 struct nesting *thisblock = block_stack;
3922 if (thisblock->data.block.stack_level == 0)
3924 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3925 &thisblock->data.block.stack_level,
3926 thisblock->data.block.first_insn);
3927 stack_block_stack = thisblock;
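/* Editor's aside (not part of the original stmt.c): a block that
   triggers the save above.  The inner block allocates a
   variable-sized object, so its stack level is saved on entry and
   restored by expand_end_bindings on exit, reclaiming the space.
   Illustrative GNU C sketch.  */
#if 0
void
g (n)
     int n;
{
  {                             /* entry: emit_stack_save (SAVE_BLOCK) */
    char buf[n];                /* dynamic size; see expand_decl below */
    buf[0] = 0;
  }                             /* exit: emit_stack_restore */
}
#endif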
3931 /* Generate RTL for the automatic variable declaration DECL.
3932 (Other kinds of declarations are simply ignored if seen here.) */
3934 void
3935 expand_decl (decl)
3936 tree decl;
3938 struct nesting *thisblock;
3939 tree type;
3941 type = TREE_TYPE (decl);
3943 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3944 type in case this node is used in a reference. */
3945 if (TREE_CODE (decl) == CONST_DECL)
3947 DECL_MODE (decl) = TYPE_MODE (type);
3948 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3949 DECL_SIZE (decl) = TYPE_SIZE (type);
3950 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3951 return;
3954 /* Otherwise, only automatic variables need any expansion done. Static and
3955 external variables, and external functions, will be handled by
3956 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3957 nothing. PARM_DECLs are handled in `assign_parms'. */
3958 if (TREE_CODE (decl) != VAR_DECL)
3959 return;
3961 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3962 return;
3964 thisblock = block_stack;
3966 /* Create the RTL representation for the variable. */
3968 if (type == error_mark_node)
3969 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3971 else if (DECL_SIZE (decl) == 0)
3972 /* Variable with incomplete type. */
3974 rtx x;
3975 if (DECL_INITIAL (decl) == 0)
3976 /* Error message was already done; now avoid a crash. */
3977 x = gen_rtx_MEM (BLKmode, const0_rtx);
3978 else
3979 /* An initializer is going to decide the size of this array.
3980 Until we know the size, represent its address with a reg. */
3981 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3983 set_mem_attributes (x, decl, 1);
3984 SET_DECL_RTL (decl, x);
3986 else if (DECL_MODE (decl) != BLKmode
3987 /* If -ffloat-store, don't put explicit float vars
3988 into regs. */
3989 && !(flag_float_store
3990 && TREE_CODE (type) == REAL_TYPE)
3991 && ! TREE_THIS_VOLATILE (decl)
3992 && (DECL_REGISTER (decl) || optimize)
3993 /* if -fcheck-memory-usage, check all variables. */
3994 && ! current_function_check_memory_usage)
3996 /* Automatic variable that can go in a register. */
3997 int unsignedp = TREE_UNSIGNED (type);
3998 enum machine_mode reg_mode
3999 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
4001 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
4003 if (GET_CODE (DECL_RTL (decl)) == REG)
4004 REGNO_DECL (REGNO (DECL_RTL (decl))) = decl;
4005 else if (GET_CODE (DECL_RTL (decl)) == CONCAT)
4007 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 0))) = decl;
4008 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 1))) = decl;
4011 mark_user_reg (DECL_RTL (decl));
4013 if (POINTER_TYPE_P (type))
4014 mark_reg_pointer (DECL_RTL (decl),
4015 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
4017 maybe_set_unchanging (DECL_RTL (decl), decl);
4019 /* If something wants our address, try to use ADDRESSOF. */
4020 if (TREE_ADDRESSABLE (decl))
4021 put_var_into_stack (decl);
4024 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
4025 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
4026 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
4027 STACK_CHECK_MAX_VAR_SIZE)))
4029 /* Variable of fixed size that goes on the stack. */
4030 rtx oldaddr = 0;
4031 rtx addr;
4032 rtx x;
4034 /* If we previously made RTL for this decl, it must be an array
4035 whose size was determined by the initializer.
4036 The old address was a register; set that register now
4037 to the proper address. */
4038 if (DECL_RTL_SET_P (decl))
4040 if (GET_CODE (DECL_RTL (decl)) != MEM
4041 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
4042 abort ();
4043 oldaddr = XEXP (DECL_RTL (decl), 0);
4046 /* Set alignment we actually gave this decl. */
4047 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
4048 : GET_MODE_BITSIZE (DECL_MODE (decl)));
4049 DECL_USER_ALIGN (decl) = 0;
4051 x = assign_temp (TREE_TYPE (decl), 1, 1, 1);
4052 set_mem_attributes (x, decl, 1);
4053 SET_DECL_RTL (decl, x);
4055 if (oldaddr)
4057 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
4058 if (addr != oldaddr)
4059 emit_move_insn (oldaddr, addr);
4062 else
4063 /* Dynamic-size object: must push space on the stack. */
4065 rtx address, size, x;
4067 /* Record the stack pointer on entry to block, if we have
4068 not already done so. */
4069 do_pending_stack_adjust ();
4070 save_stack_pointer ();
4072 /* In function-at-a-time mode, variable_size doesn't expand this,
4073 so do it now. */
4074 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
4075 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
4076 const0_rtx, VOIDmode, 0);
4078 /* Compute the variable's size, in bytes. */
4079 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
4080 free_temp_slots ();
4082 /* Allocate space on the stack for the variable. Note that
4083 DECL_ALIGN says how the variable is to be aligned and we
4084 cannot use it to conclude anything about the alignment of
4085 the size. */
4086 address = allocate_dynamic_stack_space (size, NULL_RTX,
4087 TYPE_ALIGN (TREE_TYPE (decl)));
4089 /* Reference the variable indirectly through that rtx. */
4090 x = gen_rtx_MEM (DECL_MODE (decl), address);
4091 set_mem_attributes (x, decl, 1);
4092 SET_DECL_RTL (decl, x);
4095 /* Indicate the alignment we actually gave this variable. */
4096 #ifdef STACK_BOUNDARY
4097 DECL_ALIGN (decl) = STACK_BOUNDARY;
4098 #else
4099 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
4100 #endif
4101 DECL_USER_ALIGN (decl) = 0;
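/* Editor's aside (not part of the original stmt.c): one local of each
   kind distinguished above.  Sketch only; the register promotion for
   `i' assumes optimization is on or `register' was given.  */
#if 0
void
h (n)
     int n;
{
  register int i = 0;           /* scalar: gen_reg_rtx, a pseudo reg */
  char a[16];                   /* fixed size: assign_temp stack slot */
  char v[n];                    /* dynamic size:
                                   allocate_dynamic_stack_space */
  v[0] = a[0] = (char) i;
}
#endif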
4105 /* Emit code to perform the initialization of a declaration DECL. */
4107 void
4108 expand_decl_init (decl)
4109 tree decl;
4111 int was_used = TREE_USED (decl);
4113 /* If this is a CONST_DECL, we don't have to generate any code, but
4114 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
4115 to be set while in the obstack containing the constant. If we don't
4116 do this, we can lose if we have functions nested three deep and the middle
4117 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
4118 the innermost function is the first to expand that STRING_CST. */
4119 if (TREE_CODE (decl) == CONST_DECL)
4121 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
4122 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
4123 EXPAND_INITIALIZER);
4124 return;
4127 if (TREE_STATIC (decl))
4128 return;
4130 /* Compute and store the initial value now. */
4132 if (DECL_INITIAL (decl) == error_mark_node)
4134 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
4136 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
4137 || code == POINTER_TYPE || code == REFERENCE_TYPE)
4138 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4139 0, 0);
4140 emit_queue ();
4142 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4144 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
4145 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
4146 emit_queue ();
4149 /* Don't let the initialization count as "using" the variable. */
4150 TREE_USED (decl) = was_used;
4152 /* Free any temporaries we made while initializing the decl. */
4153 preserve_temp_slots (NULL_RTX);
4154 free_temp_slots ();
4157 /* CLEANUP is an expression to be executed at exit from this binding contour;
4158 for example, in C++, it might call the destructor for this variable.
4160 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4161 CLEANUP multiple times, and have the correct semantics. This
4162 happens in exception handling, and for gotos, returns, and breaks that
4163 leave the current scope.
4165 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4166 that is not associated with any particular variable. */
4169 expand_decl_cleanup (decl, cleanup)
4170 tree decl, cleanup;
4172 struct nesting *thisblock;
4174 /* Error if we are not in any block. */
4175 if (cfun == 0 || block_stack == 0)
4176 return 0;
4178 thisblock = block_stack;
4180 /* Record the cleanup if there is one. */
4182 if (cleanup != 0)
4184 tree t;
4185 rtx seq;
4186 tree *cleanups = &thisblock->data.block.cleanups;
4187 int cond_context = conditional_context ();
4189 if (cond_context)
4191 rtx flag = gen_reg_rtx (word_mode);
4192 rtx set_flag_0;
4193 tree cond;
4195 start_sequence ();
4196 emit_move_insn (flag, const0_rtx);
4197 set_flag_0 = get_insns ();
4198 end_sequence ();
4200 thisblock->data.block.last_unconditional_cleanup
4201 = emit_insns_after (set_flag_0,
4202 thisblock->data.block.last_unconditional_cleanup);
4204 emit_move_insn (flag, const1_rtx);
4206 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
4207 SET_DECL_RTL (cond, flag);
4209 /* Conditionalize the cleanup. */
4210 cleanup = build (COND_EXPR, void_type_node,
4211 truthvalue_conversion (cond),
4212 cleanup, integer_zero_node);
4213 cleanup = fold (cleanup);
4215 cleanups = thisblock->data.block.cleanup_ptr;
4218 cleanup = unsave_expr (cleanup);
4220 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4222 if (! cond_context)
4223 /* If this block has a cleanup, it belongs in stack_block_stack. */
4224 stack_block_stack = thisblock;
4226 if (cond_context)
4228 start_sequence ();
4231 if (! using_eh_for_cleanups_p)
4232 TREE_ADDRESSABLE (t) = 1;
4233 else
4234 expand_eh_region_start ();
4236 if (cond_context)
4238 seq = get_insns ();
4239 end_sequence ();
4240 if (seq)
4241 thisblock->data.block.last_unconditional_cleanup
4242 = emit_insns_after (seq,
4243 thisblock->data.block.last_unconditional_cleanup);
4245 else
4247 thisblock->data.block.last_unconditional_cleanup
4248 = get_last_insn ();
4249 /* When we insert instructions after the last unconditional cleanup,
4250 we don't adjust last_insn. That means that a later add_insn will
4251 clobber the instructions we've just added. The easiest way to
4252 fix this is to just insert another instruction here, so that the
4253 instructions inserted after the last unconditional cleanup are
4254 never the last instruction. */
4255 emit_note (NULL, NOTE_INSN_DELETED);
4256 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4259 return 1;
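/* Editor's aside (not part of the original stmt.c): the conditional
   cleanup flag technique above, modelled in plain C.  The emitted RTL
   behaves as if the compiler wrote `ran = 0' on the last unconditional
   path, `ran = 1' where the protected object is created, and tested
   `ran' around the cleanup, so a cleanup registered on one arm of a
   conditional runs only when that arm was taken.  Standalone model
   under those assumptions.  */
#if 0
#include <stdio.h>

static int ran;                 /* plays the role of `flag' above */

static void
cleanup ()
{
  if (ran)                      /* the COND_EXPR wrapped around it */
    printf ("cleanup runs\n");
}

static void
use (cond)
     int cond;
{
  ran = 0;                      /* set_flag_0, hoisted to the start */
  if (cond)
    ran = 1;                    /* where the object is constructed */
  cleanup ();                   /* performed at scope exit */
}

int
main ()
{
  use (0);                      /* prints nothing */
  use (1);                      /* prints "cleanup runs" */
  return 0;
}
#endif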
4262 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4263 DECL_ELTS is the list of elements that belong to DECL's type.
4264 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4266 void
4267 expand_anon_union_decl (decl, cleanup, decl_elts)
4268 tree decl, cleanup, decl_elts;
4270 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4271 rtx x;
4272 tree t;
4274 /* If any of the elements are addressable, so is the entire union. */
4275 for (t = decl_elts; t; t = TREE_CHAIN (t))
4276 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4278 TREE_ADDRESSABLE (decl) = 1;
4279 break;
4282 expand_decl (decl);
4283 expand_decl_cleanup (decl, cleanup);
4284 x = DECL_RTL (decl);
4286 /* Go through the elements, assigning RTL to each. */
4287 for (t = decl_elts; t; t = TREE_CHAIN (t))
4289 tree decl_elt = TREE_VALUE (t);
4290 tree cleanup_elt = TREE_PURPOSE (t);
4291 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4293 /* Propagate the union's alignment to the elements. */
4294 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4295 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4297 /* If the element has BLKmode and the union doesn't, the union is
4298 aligned such that the element doesn't need to have BLKmode, so
4299 change the element's mode to the appropriate one for its size. */
4300 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4301 DECL_MODE (decl_elt) = mode
4302 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4304 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4305 instead create a new MEM rtx with the proper mode. */
4306 if (GET_CODE (x) == MEM)
4308 if (mode == GET_MODE (x))
4309 SET_DECL_RTL (decl_elt, x);
4310 else
4311 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4313 else if (GET_CODE (x) == REG)
4315 if (mode == GET_MODE (x))
4316 SET_DECL_RTL (decl_elt, x);
4317 else
4318 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4320 else
4321 abort ();
4323 /* Record the cleanup if there is one. */
4325 if (cleanup != 0)
4326 thisblock->data.block.cleanups
4327 = tree_cons (decl_elt, cleanup_elt,
4328 thisblock->data.block.cleanups);
4332 /* Expand a list of cleanups LIST.
4333 Elements may be expressions or may be nested lists.
4335 If DONT_DO is nonnull, then any list-element
4336 whose TREE_PURPOSE matches DONT_DO is omitted.
4337 This is sometimes used to avoid a cleanup associated with
4338 a value that is being returned out of the scope.
4340 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4341 goto and handle protection regions specially in that case.
4343 If REACHABLE, we emit code, otherwise just inform the exception handling
4344 code about this finalization. */
4346 static void
4347 expand_cleanups (list, dont_do, in_fixup, reachable)
4348 tree list;
4349 tree dont_do;
4350 int in_fixup;
4351 int reachable;
4353 tree tail;
4354 for (tail = list; tail; tail = TREE_CHAIN (tail))
4355 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4357 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4358 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4359 else
4361 if (! in_fixup && using_eh_for_cleanups_p)
4362 expand_eh_region_end_cleanup (TREE_VALUE (tail));
4364 if (reachable)
4366 /* Cleanups may be run multiple times. For example,
4367 when exiting a binding contour, we expand the
4368 cleanups associated with that contour. When a goto
4369 within that binding contour has a target outside that
4370 contour, it will expand all cleanups from its scope to
4371 the target. Though the cleanups are expanded multiple
4372 times, the control paths are non-overlapping so the
4373 cleanups will not be executed twice. */
4375 /* We may need to protect from outer cleanups. */
4376 if (in_fixup && using_eh_for_cleanups_p)
4378 expand_eh_region_start ();
4380 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4382 expand_eh_region_end_fixup (TREE_VALUE (tail));
4384 else
4385 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4387 free_temp_slots ();
4393 /* Mark the context we are emitting RTL for as a conditional
4394 context, so that any cleanup actions we register with
4395 expand_decl_cleanup will be properly conditionalized when those
4396 cleanup actions are later performed. Must be called before any
4397 expression (tree) is expanded that is within a conditional context. */
4399 void
4400 start_cleanup_deferral ()
4402 /* block_stack can be NULL if we are inside the parameter list. It is
4403 OK to do nothing, because cleanups aren't possible here. */
4404 if (block_stack)
4405 ++block_stack->data.block.conditional_code;
4408 /* Mark the end of a conditional region of code. Because cleanup
4409 deferrals may be nested, we may still be in a conditional region
4410 after we end the currently deferred cleanups; only after we end all
4411 deferred cleanups are we back in unconditional code.
4413 void
4414 end_cleanup_deferral ()
4416 /* block_stack can be NULL if we are inside the parameter list. It is
4417 OK to do nothing, because cleanups aren't possible here. */
4418 if (block_stack)
4419 --block_stack->data.block.conditional_code;
4422 /* Move all cleanups from the current block_stack
4423 to the containing block_stack, where they are assumed to
4424 have been created. If anything can cause a temporary to
4425 be created, but not expanded for more than one level of
4426 block_stacks, then this code will have to change. */
4428 void
4429 move_cleanups_up ()
4431 struct nesting *block = block_stack;
4432 struct nesting *outer = block->next;
4434 outer->data.block.cleanups
4435 = chainon (block->data.block.cleanups,
4436 outer->data.block.cleanups);
4437 block->data.block.cleanups = 0;
4440 tree
4441 last_cleanup_this_contour ()
4443 if (block_stack == 0)
4444 return 0;
4446 return block_stack->data.block.cleanups;
4449 /* Return 1 if there are any pending cleanups at this point.
4450 If THIS_CONTOUR is nonzero, check the current contour as well.
4451 Otherwise, look only at the contours that enclose this one. */
4454 any_pending_cleanups (this_contour)
4455 int this_contour;
4457 struct nesting *block;
4459 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4460 return 0;
4462 if (this_contour && block_stack->data.block.cleanups != NULL)
4463 return 1;
4464 if (block_stack->data.block.cleanups == 0
4465 && block_stack->data.block.outer_cleanups == 0)
4466 return 0;
4468 for (block = block_stack->next; block; block = block->next)
4469 if (block->data.block.cleanups != 0)
4470 return 1;
4472 return 0;
4475 /* Enter a case (Pascal) or switch (C) statement.
4476 Push a block onto case_stack and nesting_stack
4477 to accumulate the case-labels that are seen
4478 and to record the labels generated for the statement.
4480 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4481 Otherwise, this construct is transparent for `exit_something'.
4483 EXPR is the index-expression to be dispatched on.
4484 TYPE is its nominal type. We could simply convert EXPR to this type,
4485 but instead we take short cuts. */
4487 void
4488 expand_start_case (exit_flag, expr, type, printname)
4489 int exit_flag;
4490 tree expr;
4491 tree type;
4492 const char *printname;
4494 struct nesting *thiscase = ALLOC_NESTING ();
4496 /* Make an entry on case_stack for the case we are entering. */
4498 thiscase->next = case_stack;
4499 thiscase->all = nesting_stack;
4500 thiscase->depth = ++nesting_depth;
4501 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4502 thiscase->data.case_stmt.case_list = 0;
4503 thiscase->data.case_stmt.index_expr = expr;
4504 thiscase->data.case_stmt.nominal_type = type;
4505 thiscase->data.case_stmt.default_label = 0;
4506 thiscase->data.case_stmt.printname = printname;
4507 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4508 case_stack = thiscase;
4509 nesting_stack = thiscase;
4511 do_pending_stack_adjust ();
4513 /* Make sure case_stmt.start points to something that won't
4514 need any transformation before expand_end_case. */
4515 if (GET_CODE (get_last_insn ()) != NOTE)
4516 emit_note (NULL, NOTE_INSN_DELETED);
4518 thiscase->data.case_stmt.start = get_last_insn ();
4520 start_cleanup_deferral ();
4523 /* Start a "dummy case statement" within which case labels are invalid
4524 and are not connected to any larger real case statement.
4525 This can be used if you don't want to let a case statement jump
4526 into the middle of certain kinds of constructs. */
4528 void
4529 expand_start_case_dummy ()
4531 struct nesting *thiscase = ALLOC_NESTING ();
4533 /* Make an entry on case_stack for the dummy. */
4535 thiscase->next = case_stack;
4536 thiscase->all = nesting_stack;
4537 thiscase->depth = ++nesting_depth;
4538 thiscase->exit_label = 0;
4539 thiscase->data.case_stmt.case_list = 0;
4540 thiscase->data.case_stmt.start = 0;
4541 thiscase->data.case_stmt.nominal_type = 0;
4542 thiscase->data.case_stmt.default_label = 0;
4543 case_stack = thiscase;
4544 nesting_stack = thiscase;
4545 start_cleanup_deferral ();
4548 /* End a dummy case statement. */
4550 void
4551 expand_end_case_dummy ()
4553 end_cleanup_deferral ();
4554 POPSTACK (case_stack);
4557 /* Return the data type of the index-expression
4558 of the innermost case statement, or null if none. */
4560 tree
4561 case_index_expr_type ()
4563 if (case_stack)
4564 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4565 return 0;
4568 static void
4569 check_seenlabel ()
4571 /* If this is the first label, warn if any insns have been emitted. */
4572 if (case_stack->data.case_stmt.line_number_status >= 0)
4574 rtx insn;
4576 restore_line_number_status
4577 (case_stack->data.case_stmt.line_number_status);
4578 case_stack->data.case_stmt.line_number_status = -1;
4580 for (insn = case_stack->data.case_stmt.start;
4581 insn;
4582 insn = NEXT_INSN (insn))
4584 if (GET_CODE (insn) == CODE_LABEL)
4585 break;
4586 if (GET_CODE (insn) != NOTE
4587 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4590 insn = PREV_INSN (insn);
4591 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4593 /* If insn is zero, then there must have been a syntax error. */
4594 if (insn)
4595 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4596 NOTE_LINE_NUMBER (insn),
4597 "unreachable code at beginning of %s",
4598 case_stack->data.case_stmt.printname);
4599 break;
4605 /* Accumulate one case or default label inside a case or switch statement.
4606 VALUE is the value of the case (a null pointer, for a default label).
4607 The function CONVERTER, when applied to arguments T and V,
4608 converts the value V to the type T.
4610 If not currently inside a case or switch statement, return 1 and do
4611 nothing. The caller will print a language-specific error message.
4612 If VALUE is a duplicate or overlaps, return 2 and do nothing
4613 except store the (first) duplicate node in *DUPLICATE.
4614 If VALUE is out of range, return 3 and do nothing.
4615 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4616 Return 0 on success.
4618 Extended to handle range statements. */
4621 pushcase (value, converter, label, duplicate)
4622 tree value;
4623 tree (*converter) PARAMS ((tree, tree));
4624 tree label;
4625 tree *duplicate;
4627 tree index_type;
4628 tree nominal_type;
4630 /* Fail if not inside a real case statement. */
4631 if (! (case_stack && case_stack->data.case_stmt.start))
4632 return 1;
4634 if (stack_block_stack
4635 && stack_block_stack->depth > case_stack->depth)
4636 return 5;
4638 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4639 nominal_type = case_stack->data.case_stmt.nominal_type;
4641 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4642 if (index_type == error_mark_node)
4643 return 0;
4645 /* Convert VALUE to the type in which the comparisons are nominally done. */
4646 if (value != 0)
4647 value = (*converter) (nominal_type, value);
4649 check_seenlabel ();
4651 /* Fail if this value is out of range for the actual type of the index
4652 (which may be narrower than NOMINAL_TYPE). */
4653 if (value != 0
4654 && (TREE_CONSTANT_OVERFLOW (value)
4655 || ! int_fits_type_p (value, index_type)))
4656 return 3;
4658 return add_case_node (value, value, label, duplicate);
4661 /* Like pushcase but this case applies to all values between VALUE1 and
4662 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4663 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4664 starts at VALUE1 and ends at the highest value of the index type.
4665 If both are NULL, this case applies to all values.
4667 The return value is the same as that of pushcase but there is one
4668 additional error code: 4 means the specified range was empty. */
4671 pushcase_range (value1, value2, converter, label, duplicate)
4672 tree value1, value2;
4673 tree (*converter) PARAMS ((tree, tree));
4674 tree label;
4675 tree *duplicate;
4677 tree index_type;
4678 tree nominal_type;
4680 /* Fail if not inside a real case statement. */
4681 if (! (case_stack && case_stack->data.case_stmt.start))
4682 return 1;
4684 if (stack_block_stack
4685 && stack_block_stack->depth > case_stack->depth)
4686 return 5;
4688 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4689 nominal_type = case_stack->data.case_stmt.nominal_type;
4691 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4692 if (index_type == error_mark_node)
4693 return 0;
4695 check_seenlabel ();
4697 /* Convert VALUEs to type in which the comparisons are nominally done
4698 and replace any unspecified value with the corresponding bound. */
4699 if (value1 == 0)
4700 value1 = TYPE_MIN_VALUE (index_type);
4701 if (value2 == 0)
4702 value2 = TYPE_MAX_VALUE (index_type);
4704 /* Fail if the range is empty. Do this before any conversion since
4705 we want to allow out-of-range empty ranges. */
4706 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4707 return 4;
4709 /* If the max was unbounded, use the max of the nominal_type we are
4710 converting to. Do this after the < check above to suppress false
4711 positives. */
4712 if (value2 == 0)
4713 value2 = TYPE_MAX_VALUE (nominal_type);
4715 value1 = (*converter) (nominal_type, value1);
4716 value2 = (*converter) (nominal_type, value2);
4718 /* Fail if these values are out of range. */
4719 if (TREE_CONSTANT_OVERFLOW (value1)
4720 || ! int_fits_type_p (value1, index_type))
4721 return 3;
4723 if (TREE_CONSTANT_OVERFLOW (value2)
4724 || ! int_fits_type_p (value2, index_type))
4725 return 3;
4727 return add_case_node (value1, value2, label, duplicate);
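/* Editor's aside (not part of the original stmt.c): the GNU C
   case-range extension that arrives here; an ordinary `case' goes
   through pushcase with equal low and high bounds.  Sketch only.  */
#if 0
int
classify (c)
     int c;
{
  switch (c)
    {
    case '0' ... '9':           /* one pushcase_range call */
      return 1;
    case 'a' ... 'z':           /* two more ranges, same label */
    case 'A' ... 'Z':
      return 2;
    default:
      return 0;
    }
}
#endif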
4730 /* Do the actual insertion of a case label for pushcase and pushcase_range
4731 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4732 slowdown for large switch statements. */
4735 add_case_node (low, high, label, duplicate)
4736 tree low, high;
4737 tree label;
4738 tree *duplicate;
4740 struct case_node *p, **q, *r;
4742 /* If there's no HIGH value, then this is not a case range; it's
4743 just a simple case label. But that's just a degenerate case
4744 range. */
4745 if (!high)
4746 high = low;
4748 /* Handle default labels specially. */
4749 if (!high && !low)
4751 if (case_stack->data.case_stmt.default_label != 0)
4753 *duplicate = case_stack->data.case_stmt.default_label;
4754 return 2;
4756 case_stack->data.case_stmt.default_label = label;
4757 expand_label (label);
4758 return 0;
4761 q = &case_stack->data.case_stmt.case_list;
4762 p = *q;
4764 while ((r = *q))
4766 p = r;
4768 /* Keep going past elements distinctly greater than HIGH. */
4769 if (tree_int_cst_lt (high, p->low))
4770 q = &p->left;
4772 /* or distinctly less than LOW. */
4773 else if (tree_int_cst_lt (p->high, low))
4774 q = &p->right;
4776 else
4778 /* We have an overlap; this is an error. */
4779 *duplicate = p->code_label;
4780 return 2;
4784 /* Add this label to the chain, and succeed. */
4786 r = (struct case_node *) xmalloc (sizeof (struct case_node));
4787 r->low = low;
4789 /* If the bounds are equal, turn this into the one-value case. */
4790 if (tree_int_cst_equal (low, high))
4791 r->high = r->low;
4792 else
4793 r->high = high;
4795 r->code_label = label;
4796 expand_label (label);
4798 *q = r;
4799 r->parent = p;
4800 r->left = 0;
4801 r->right = 0;
4802 r->balance = 0;
4804 while (p)
4806 struct case_node *s;
4808 if (r == p->left)
4810 int b;
4812 if (! (b = p->balance))
4813 /* Growth propagation from left side. */
4814 p->balance = -1;
4815 else if (b < 0)
4817 if (r->balance < 0)
4819 /* R-Rotation */
4820 if ((p->left = s = r->right))
4821 s->parent = p;
4823 r->right = p;
4824 p->balance = 0;
4825 r->balance = 0;
4826 s = p->parent;
4827 p->parent = r;
4829 if ((r->parent = s))
4831 if (s->left == p)
4832 s->left = r;
4833 else
4834 s->right = r;
4836 else
4837 case_stack->data.case_stmt.case_list = r;
4839 else
4840 /* r->balance == +1 */
4842 /* LR-Rotation */
4844 int b2;
4845 struct case_node *t = r->right;
4847 if ((p->left = s = t->right))
4848 s->parent = p;
4850 t->right = p;
4851 if ((r->right = s = t->left))
4852 s->parent = r;
4854 t->left = r;
4855 b = t->balance;
4856 b2 = b < 0;
4857 p->balance = b2;
4858 b2 = -b2 - b;
4859 r->balance = b2;
4860 t->balance = 0;
4861 s = p->parent;
4862 p->parent = t;
4863 r->parent = t;
4865 if ((t->parent = s))
4867 if (s->left == p)
4868 s->left = t;
4869 else
4870 s->right = t;
4872 else
4873 case_stack->data.case_stmt.case_list = t;
4875 break;
4878 else
4880 /* p->balance == +1; growth of left side balances the node. */
4881 p->balance = 0;
4882 break;
4885 else
4886 /* r == p->right */
4888 int b;
4890 if (! (b = p->balance))
4891 /* Growth propagation from right side. */
4892 p->balance++;
4893 else if (b > 0)
4895 if (r->balance > 0)
4897 /* L-Rotation */
4899 if ((p->right = s = r->left))
4900 s->parent = p;
4902 r->left = p;
4903 p->balance = 0;
4904 r->balance = 0;
4905 s = p->parent;
4906 p->parent = r;
4907 if ((r->parent = s))
4909 if (s->left == p)
4910 s->left = r;
4911 else
4912 s->right = r;
4915 else
4916 case_stack->data.case_stmt.case_list = r;
4919 else
4920 /* r->balance == -1 */
4922 /* RL-Rotation */
4923 int b2;
4924 struct case_node *t = r->left;
4926 if ((p->right = s = t->left))
4927 s->parent = p;
4929 t->left = p;
4931 if ((r->left = s = t->right))
4932 s->parent = r;
4934 t->right = r;
4935 b = t->balance;
4936 b2 = b < 0;
4937 r->balance = b2;
4938 b2 = -b2 - b;
4939 p->balance = b2;
4940 t->balance = 0;
4941 s = p->parent;
4942 p->parent = t;
4943 r->parent = t;
4945 if ((t->parent = s))
4947 if (s->left == p)
4948 s->left = t;
4949 else
4950 s->right = t;
4953 else
4954 case_stack->data.case_stmt.case_list = t;
4956 break;
4958 else
4960 /* p->balance == -1; growth of right side balances the node. */
4961 p->balance = 0;
4962 break;
4966 r = p;
4967 p = p->parent;
4970 return 0;
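/* Editor's aside (not part of the original stmt.c): source for which
   the insertion walk above detects an overlap and returns 2, which
   the front end reports as a duplicate case.  Sketch only.  */
#if 0
int
dup (x)
     int x;
{
  switch (x)
    {
    case 1 ... 3:
      return 0;
    case 2:                     /* error: duplicate case value */
      return 1;
    }
  return -1;
}
#endif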
4973 /* Returns the number of possible values of TYPE.
4974 Returns -1 if the number is unknown or variable, or if it does not
4975 fit in a HOST_WIDE_INT.
4976 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4977 do not increase monotonically (there may be duplicates);
4978 to 1 if the values increase monotonically, but not always by 1;
4979 otherwise sets it to 0. */
4981 HOST_WIDE_INT
4982 all_cases_count (type, spareness)
4983 tree type;
4984 int *spareness;
4986 tree t;
4987 HOST_WIDE_INT count, minval, lastval;
4989 *spareness = 0;
4991 switch (TREE_CODE (type))
4993 case BOOLEAN_TYPE:
4994 count = 2;
4995 break;
4997 case CHAR_TYPE:
4998 count = 1 << BITS_PER_UNIT;
4999 break;
5001 default:
5002 case INTEGER_TYPE:
5003 if (TYPE_MAX_VALUE (type) != 0
5004 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
5005 TYPE_MIN_VALUE (type))))
5006 && 0 != (t = fold (build (PLUS_EXPR, type, t,
5007 convert (type, integer_zero_node))))
5008 && host_integerp (t, 1))
5009 count = tree_low_cst (t, 1);
5010 else
5011 return -1;
5012 break;
5014 case ENUMERAL_TYPE:
5015 /* Don't waste time with enumeral types with huge values. */
5016 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
5017 || TYPE_MAX_VALUE (type) == 0
5018 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
5019 return -1;
5021 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
5022 count = 0;
5024 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
5026 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
5028 if (*spareness == 2 || thisval < lastval)
5029 *spareness = 2;
5030 else if (thisval != minval + count)
5031 *spareness = 1;
5033 count++;
5037 return count;
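/* Editor's aside (not part of the original stmt.c): the contract
   above on a few enumerated types, by my reading of it.  */
#if 0
enum dense { D0, D1, D2 };             /* count 3, *SPARENESS == 0 */
enum gappy { G0 = 0, G2 = 2, G9 = 9 }; /* count 3, *SPARENESS == 1 */
enum mixed { M2 = 2, M1 = 1 };         /* values not monotonic:
                                          *SPARENESS == 2 */
#endif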
5040 #define BITARRAY_TEST(ARRAY, INDEX) \
5041 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5042 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
5043 #define BITARRAY_SET(ARRAY, INDEX) \
5044 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5045 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
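/* Editor's aside (not part of the original stmt.c): the two macros in
   a standalone harness, assuming HOST_BITS_PER_CHAR == CHAR_BIT.  */
#if 0
#include <stdio.h>
#include <string.h>
#include <limits.h>

#define HOST_BITS_PER_CHAR CHAR_BIT

#define BITARRAY_TEST(ARRAY, INDEX) \
  ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
   & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
#define BITARRAY_SET(ARRAY, INDEX) \
  ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
   |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))

int
main ()
{
  unsigned char seen[2];        /* one bit for each of 16 case values */
  int i;

  memset (seen, 0, sizeof seen);
  BITARRAY_SET (seen, 0);
  BITARRAY_SET (seen, 5);
  BITARRAY_SET (seen, 9);

  for (i = 0; i < 16; i++)
    if (! BITARRAY_TEST (seen, i))
      printf ("value %d not seen\n", i);
  return 0;
}
#endif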
5047 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
5048 with the case values we have seen, assuming the case expression
5049 has the given TYPE.
5050 SPARSENESS is as determined by all_cases_count.
5052 The time needed is proportional to COUNT, unless
5053 SPARSENESS is 2, in which case quadratic time is needed. */
5055 void
5056 mark_seen_cases (type, cases_seen, count, sparseness)
5057 tree type;
5058 unsigned char *cases_seen;
5059 HOST_WIDE_INT count;
5060 int sparseness;
5062 tree next_node_to_try = NULL_TREE;
5063 HOST_WIDE_INT next_node_offset = 0;
5065 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
5066 tree val = make_node (INTEGER_CST);
5068 TREE_TYPE (val) = type;
5069 if (! root)
5070 /* Do nothing. */
5072 else if (sparseness == 2)
5074 tree t;
5075 unsigned HOST_WIDE_INT xlo;
5077 /* This less efficient loop is only needed to handle
5078 duplicate case values (multiple enum constants
5079 with the same value). */
5080 TREE_TYPE (val) = TREE_TYPE (root->low);
5081 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5082 t = TREE_CHAIN (t), xlo++)
5084 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
5085 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
5086 n = root;
5089 /* Keep going past elements distinctly greater than VAL. */
5090 if (tree_int_cst_lt (val, n->low))
5091 n = n->left;
5093 /* or distinctly less than VAL. */
5094 else if (tree_int_cst_lt (n->high, val))
5095 n = n->right;
5097 else
5099 /* We have found a matching range. */
5100 BITARRAY_SET (cases_seen, xlo);
5101 break;
5104 while (n);
5107 else
5109 if (root->left)
5110 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5112 for (n = root; n; n = n->right)
5114 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5115 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5116 while (! tree_int_cst_lt (n->high, val))
5118 /* Calculate (into xlo) the "offset" of the integer (val).
5119 The element with lowest value has offset 0, the next smallest
5120 element has offset 1, etc. */
5122 unsigned HOST_WIDE_INT xlo;
5123 HOST_WIDE_INT xhi;
5124 tree t;
5126 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5128 /* The TYPE_VALUES will be in increasing order, so
5129 start searching where we last ended. */
5130 t = next_node_to_try;
5131 xlo = next_node_offset;
5132 xhi = 0;
5133 for (;;)
5135 if (t == NULL_TREE)
5137 t = TYPE_VALUES (type);
5138 xlo = 0;
5140 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5142 next_node_to_try = TREE_CHAIN (t);
5143 next_node_offset = xlo + 1;
5144 break;
5146 xlo++;
5147 t = TREE_CHAIN (t);
5148 if (t == next_node_to_try)
5150 xlo = -1;
5151 break;
5155 else
5157 t = TYPE_MIN_VALUE (type);
5158 if (t)
5159 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5160 &xlo, &xhi);
5161 else
5162 xlo = xhi = 0;
5163 add_double (xlo, xhi,
5164 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5165 &xlo, &xhi);
5168 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5169 BITARRAY_SET (cases_seen, xlo);
5171 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5172 1, 0,
5173 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5179 /* Called when the index of a switch statement is an enumerated type
5180 and there is no default label.
5182 Checks that all enumeration literals are covered by the case
5183 expressions of a switch. Also, warn if there are any extra
5184 switch cases that are *not* elements of the enumerated type.
5186 If all enumeration literals were covered by the case expressions,
5187 turn one of the expressions into the default expression since it should
5188 not be possible to fall through such a switch. */
5190 void
5191 check_for_full_enumeration_handling (type)
5192 tree type;
5194 struct case_node *n;
5195 tree chain;
5197 /* True iff the selector type is a numbered set mode. */
5198 int sparseness = 0;
5200 /* The number of possible selector values. */
5201 HOST_WIDE_INT size;
5203 /* For each possible selector value, a one iff it has been matched
5204 by a case value alternative. */
5205 unsigned char *cases_seen;
5207 /* The allocated size of cases_seen, in chars. */
5208 HOST_WIDE_INT bytes_needed;
5210 if (! warn_switch)
5211 return;
5213 size = all_cases_count (type, &sparseness);
5214 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5216 if (size > 0 && size < 600000
5217 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5218 this optimization if we don't have enough memory rather than
5219 aborting, as xmalloc would do. */
5220 && (cases_seen =
5221 (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
5223 HOST_WIDE_INT i;
5224 tree v = TYPE_VALUES (type);
5226 /* The time complexity of this code is normally O(N), where
5227 N is the number of members in the enumerated type.
5228 However, if type is an ENUMERAL_TYPE whose values do not
5229 increase monotonically, O(N*log(N)) time may be needed. */
5231 mark_seen_cases (type, cases_seen, size, sparseness);
5233 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5234 if (BITARRAY_TEST (cases_seen, i) == 0)
5235 warning ("enumeration value `%s' not handled in switch",
5236 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5238 free (cases_seen);
5241 /* Now we go the other way around; we warn if there are case
5242 expressions that don't correspond to enumerators. This can
5243 occur since C and C++ don't enforce type-checking of
5244 assignments to enumeration variables. */
5246 if (case_stack->data.case_stmt.case_list
5247 && case_stack->data.case_stmt.case_list->left)
5248 case_stack->data.case_stmt.case_list
5249 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5250 if (warn_switch)
5251 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5253 for (chain = TYPE_VALUES (type);
5254 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5255 chain = TREE_CHAIN (chain))
5258 if (!chain)
5260 if (TYPE_NAME (type) == 0)
5261 warning ("case value `%ld' not in enumerated type",
5262 (long) TREE_INT_CST_LOW (n->low));
5263 else
5264 warning ("case value `%ld' not in enumerated type `%s'",
5265 (long) TREE_INT_CST_LOW (n->low),
5266 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5267 == IDENTIFIER_NODE)
5268 ? TYPE_NAME (type)
5269 : DECL_NAME (TYPE_NAME (type))));
5271 if (!tree_int_cst_equal (n->low, n->high))
5273 for (chain = TYPE_VALUES (type);
5274 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5275 chain = TREE_CHAIN (chain))
5278 if (!chain)
5280 if (TYPE_NAME (type) == 0)
5281 warning ("case value `%ld' not in enumerated type",
5282 (long) TREE_INT_CST_LOW (n->high));
5283 else
5284 warning ("case value `%ld' not in enumerated type `%s'",
5285 (long) TREE_INT_CST_LOW (n->high),
5286 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5287 == IDENTIFIER_NODE)
5288 ? TYPE_NAME (type)
5289 : DECL_NAME (TYPE_NAME (type))));
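/* Editor's aside (not part of the original stmt.c): both directions
   of the check above, as seen under -Wswitch with no default label.
   Sketch only; wording taken from the warning () calls above.  */
#if 0
enum color { RED, GREEN, BLUE };

int
f (c)
     enum color c;
{
  switch (c)
    {
    case RED:
      return 0;
    case GREEN:
      return 1;
    case 7:                     /* warning: case value `7' not in
                                   enumerated type `color' */
      return 2;
    }                           /* warning: enumeration value `BLUE'
                                   not handled in switch */
  return -1;
}
#endif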
5295 /* Free CN, and its children. */
5297 static void
5298 free_case_nodes (cn)
5299 case_node_ptr cn;
5301 if (cn)
5303 free_case_nodes (cn->left);
5304 free_case_nodes (cn->right);
5305 free (cn);
5311 /* Terminate a case (Pascal) or switch (C) statement
5312 in which ORIG_INDEX is the expression to be tested.
5313 Generate the code to test it and jump to the right place. */
5315 void
5316 expand_end_case (orig_index)
5317 tree orig_index;
5319 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE, orig_minval;
5320 rtx default_label = 0;
5321 struct case_node *n;
5322 unsigned int count;
5323 rtx index;
5324 rtx table_label;
5325 int ncases;
5326 rtx *labelvec;
5327 int i;
5328 rtx before_case, end;
5329 struct nesting *thiscase = case_stack;
5330 tree index_expr, index_type;
5331 int unsignedp;
5333 /* Don't crash due to previous errors. */
5334 if (thiscase == NULL)
5335 return;
5337 table_label = gen_label_rtx ();
5338 index_expr = thiscase->data.case_stmt.index_expr;
5339 index_type = TREE_TYPE (index_expr);
5340 unsignedp = TREE_UNSIGNED (index_type);
5342 do_pending_stack_adjust ();
5344 /* This might get a spurious warning in the presence of a syntax error;
5345 it could be fixed by moving the call to check_seenlabel after the
5346 check for error_mark_node, and copying the code of check_seenlabel that
5347 deals with case_stack->data.case_stmt.line_number_status /
5348 restore_line_number_status in front of the call to end_cleanup_deferral.
5349 However, this might miss some useful warnings in the presence of
5350 non-syntax errors. */
5351 check_seenlabel ();
5353 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5354 if (index_type != error_mark_node)
5356 /* If the switch expression was an enumerated type, check that all
5357 enumeration literals are covered by the cases.
5358 No sense trying this if there's a default case, however. */
5360 if (!thiscase->data.case_stmt.default_label
5361 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5362 && TREE_CODE (index_expr) != INTEGER_CST)
5363 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5365 /* If we don't have a default-label, create one here,
5366 after the body of the switch. */
5367 if (thiscase->data.case_stmt.default_label == 0)
5369 thiscase->data.case_stmt.default_label
5370 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5371 expand_label (thiscase->data.case_stmt.default_label);
5373 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5375 before_case = get_last_insn ();
5377 if (thiscase->data.case_stmt.case_list
5378 && thiscase->data.case_stmt.case_list->left)
5379 thiscase->data.case_stmt.case_list
5380 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5382 /* Simplify the case-list before we count it. */
5383 group_case_nodes (thiscase->data.case_stmt.case_list);
5385 /* Get upper and lower bounds of case values.
5386 Also convert all the case values to the index expr's data type. */
5388 count = 0;
5389 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5391 /* Check low and high label values are integers. */
5392 if (TREE_CODE (n->low) != INTEGER_CST)
5393 abort ();
5394 if (TREE_CODE (n->high) != INTEGER_CST)
5395 abort ();
5397 n->low = convert (index_type, n->low);
5398 n->high = convert (index_type, n->high);
5400 /* Count the elements and track the largest and smallest
5401 of them (treating them as signed even if they are not). */
5402 if (count++ == 0)
5404 minval = n->low;
5405 maxval = n->high;
5407 else
5409 if (INT_CST_LT (n->low, minval))
5410 minval = n->low;
5411 if (INT_CST_LT (maxval, n->high))
5412 maxval = n->high;
5414 /* A range counts double, since it requires two compares. */
5415 if (! tree_int_cst_equal (n->low, n->high))
5416 count++;
5419 orig_minval = minval;
5421 /* Compute span of values. */
5422 if (count != 0)
5423 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5425 end_cleanup_deferral ();
5427 if (count == 0)
5429 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5430 emit_queue ();
5431 emit_jump (default_label);
5434 /* If range of values is much bigger than number of values,
5435 make a sequence of conditional branches instead of a dispatch.
5436 If the switch-index is a constant, do it this way
5437 because we can optimize it. */
5439 else if (count < case_values_threshold ()
5440 || compare_tree_int (range, 10 * count) > 0
5441 /* RANGE may be signed, and really large ranges will show up
5442 as negative numbers. */
5443 || compare_tree_int (range, 0) < 0
5444 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5445 || flag_pic
5446 #endif
5447 || TREE_CODE (index_expr) == INTEGER_CST
5448 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5449 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5451 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5453 /* If the index is a short or char for which we do not have
5454 an insn to handle comparisons directly, convert it to
5455 a full integer now, rather than letting each comparison
5456 generate the conversion. */
5458 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5459 && ! have_insn_for (COMPARE, GET_MODE (index)))
5461 enum machine_mode wider_mode;
5462 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5463 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5464 if (have_insn_for (COMPARE, wider_mode))
5466 index = convert_to_mode (wider_mode, index, unsignedp);
5467 break;
5471 emit_queue ();
5472 do_pending_stack_adjust ();
5474 index = protect_from_queue (index, 0);
5475 if (GET_CODE (index) == MEM)
5476 index = copy_to_reg (index);
5477 if (GET_CODE (index) == CONST_INT
5478 || TREE_CODE (index_expr) == INTEGER_CST)
5480 /* Make a tree node with the proper constant value
5481 if we don't already have one. */
5482 if (TREE_CODE (index_expr) != INTEGER_CST)
5484 index_expr
5485 = build_int_2 (INTVAL (index),
5486 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5487 index_expr = convert (index_type, index_expr);
5490 /* For constant index expressions we need only
5491 issue an unconditional branch to the appropriate
5492 target code. The job of removing any unreachable
5493 code is left to the optimisation phase if the
5494 "-O" option is specified. */
5495 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5496 if (! tree_int_cst_lt (index_expr, n->low)
5497 && ! tree_int_cst_lt (n->high, index_expr))
5498 break;
5500 if (n)
5501 emit_jump (label_rtx (n->code_label));
5502 else
5503 emit_jump (default_label);
5505 else
5507 /* If the index expression is not constant we generate
5508 a binary decision tree to select the appropriate
5509 target code. This is done as follows:
5511 The list of cases is rearranged into a binary tree,
5512 nearly optimal assuming equal probability for each case.
5514 The tree is transformed into RTL, eliminating
5515 redundant test conditions at the same time.
5517 If program flow could reach the end of the
5518 decision tree an unconditional jump to the
5519 default code is emitted. */
5521 use_cost_table
5522 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5523 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5524 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5525 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5526 default_label, index_type);
5527 emit_jump_if_reachable (default_label);
5530 else
5532 if (! try_casesi (index_type, index_expr, minval, range,
5533 table_label, default_label))
5535 index_type = thiscase->data.case_stmt.nominal_type;
5536 if (! try_tablejump (index_type, index_expr, minval, range,
5537 table_label, default_label))
5538 abort ();
5541 /* Get table of labels to jump to, in order of case index. */
5543 ncases = TREE_INT_CST_LOW (range) + 1;
5544 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5545 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5547 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5549 HOST_WIDE_INT i
5550 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5552 while (1)
5554 labelvec[i]
5555 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5556 if (i + TREE_INT_CST_LOW (orig_minval)
5557 == TREE_INT_CST_LOW (n->high))
5558 break;
5559 i++;
5563 /* Fill in the gaps with the default. */
5564 for (i = 0; i < ncases; i++)
5565 if (labelvec[i] == 0)
5566 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5568 /* Output the table. */
5569 emit_label (table_label);
5571 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5572 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5573 gen_rtx_LABEL_REF (Pmode, table_label),
5574 gen_rtvec_v (ncases, labelvec),
5575 const0_rtx, const0_rtx));
5576 else
5577 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5578 gen_rtvec_v (ncases, labelvec)));
5580 /* If the case insn drops through the table,
5581 after the table we must jump to the default-label.
5582 Otherwise record no drop-through after the table. */
5583 #ifdef CASE_DROPS_THROUGH
5584 emit_jump (default_label);
5585 #else
5586 emit_barrier ();
5587 #endif
5590 before_case = NEXT_INSN (before_case);
5591 end = get_last_insn ();
5592 squeeze_notes (&before_case, &end);
5593 reorder_insns (before_case, end,
5594 thiscase->data.case_stmt.start);
5596 else
5597 end_cleanup_deferral ();
5599 if (thiscase->exit_label)
5600 emit_label (thiscase->exit_label);
5602 free_case_nodes (case_stack->data.case_stmt.case_list);
5603 POPSTACK (case_stack);
5605 free_temp_slots ();
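/* Editor's aside (not part of the original stmt.c): the table-vs-tree
   decision above reduced to plain arithmetic.  THRESHOLD stands in
   for case_values_threshold (), which is target-dependent; the factor
   10 is the density bound hard-coded above.  Standalone sketch.  */
#if 0
#include <stdio.h>

static int
use_jump_table (count, range, threshold)
     unsigned int count, threshold;
     long range;
{
  if (count < threshold)        /* too few labels to pay for a table */
    return 0;
  if (range < 0)                /* span overflowed: hopelessly sparse */
    return 0;
  if (range > 10 * (long) count)/* sparse: table would be mostly
                                   default entries */
    return 0;
  return 1;                     /* dense enough for casesi/tablejump */
}

int
main ()
{
  printf ("%d\n", use_jump_table (8, 9L, 5));    /* dense: 1 */
  printf ("%d\n", use_jump_table (8, 1000L, 5)); /* sparse: 0 */
  return 0;
}
#endif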
5608 /* Convert the tree NODE into a list linked by the right field, with the left
5609 field zeroed. RIGHT is used for recursion; it is a list to be placed
5610 rightmost in the resulting list. */
5612 static struct case_node *
5613 case_tree2list (node, right)
5614 struct case_node *node, *right;
5616 struct case_node *left;
5618 if (node->right)
5619 right = case_tree2list (node->right, right);
5621 node->right = right;
5622 if ((left = node->left))
5624 node->left = 0;
5625 return case_tree2list (left, node);
5628 return node;
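/* Editor's aside (not part of the original stmt.c): the same
   flattening on a toy tree of ints, to make the recursion concrete.
   An in-order walk comes out as a list chained through `right',
   smallest value first.  Standalone sketch.  */
#if 0
#include <stdio.h>

struct toy { int val; struct toy *left, *right; };

static struct toy *
toy_tree2list (node, right)
     struct toy *node, *right;
{
  struct toy *left;

  if (node->right)
    right = toy_tree2list (node->right, right);

  node->right = right;
  if ((left = node->left))
    {
      node->left = 0;
      return toy_tree2list (left, node);
    }

  return node;
}

int
main ()
{
  struct toy n1 = { 1, 0, 0 }, n2 = { 2, 0, 0 }, n3 = { 3, 0, 0 };
  struct toy *p;

  n2.left = &n1;                /* the tree:   2   */
  n2.right = &n3;               /*            / \  */
                                /*           1   3 */
  for (p = toy_tree2list (&n2, (struct toy *) 0); p; p = p->right)
    printf ("%d\n", p->val);    /* prints 1 2 3 */
  return 0;
}
#endif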
5631 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5633 static void
5634 do_jump_if_equal (op1, op2, label, unsignedp)
5635 rtx op1, op2, label;
5636 int unsignedp;
5637 {
5638 if (GET_CODE (op1) == CONST_INT
5639 && GET_CODE (op2) == CONST_INT)
5640 {
5641 if (INTVAL (op1) == INTVAL (op2))
5642 emit_jump (label);
5643 }
5644 else
5645 {
5646 enum machine_mode mode = GET_MODE (op1);
5647 if (mode == VOIDmode)
5648 mode = GET_MODE (op2);
5649 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
5650 0, label);
5651 }
5652 }
5654 /* Not all case values are encountered equally. This function
5655 uses a heuristic to weight case labels, in cases where that
5656 looks like a reasonable thing to do.
5658 Right now, all we try to guess is text, and we establish the
5659 following weights:
5661 chars above space: 16
5662 digits: 16
5663 default: 12
5664 space, punct: 8
5665 tab: 4
5666 newline: 2
5667 other "\" chars: 1
5668 remaining chars: 0
5670 If we find any cases in the switch that are not either -1 or in the range
5671 of valid ASCII characters, or are control characters other than those
5672 commonly used with "\", don't treat this switch as scanning text.
5674 Return 1 if these nodes are suitable for cost estimation, otherwise
5675 return 0. */
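/* Example (added for exposition): a switch over 'a', '0' and ' ' gets
   weights 16, 16 and 8, biasing the split point toward the letter and
   digit cases; a switch containing case 200 or case '\002' is not
   treated as text and is not cost-estimated at all. */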
5677 static int
5678 estimate_case_costs (node)
5679 case_node_ptr node;
5680 {
5681 tree min_ascii = integer_minus_one_node;
5682 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5683 case_node_ptr n;
5684 int i;
5686 /* If we haven't already made the cost table, make it now. Note that the
5687 lower bound of the table is -1, not zero. */
5689 if (! cost_table_initialized)
5690 {
5691 cost_table_initialized = 1;
5693 for (i = 0; i < 128; i++)
5694 {
5695 if (ISALNUM (i))
5696 COST_TABLE (i) = 16;
5697 else if (ISPUNCT (i))
5698 COST_TABLE (i) = 8;
5699 else if (ISCNTRL (i))
5700 COST_TABLE (i) = -1;
5701 }
5703 COST_TABLE (' ') = 8;
5704 COST_TABLE ('\t') = 4;
5705 COST_TABLE ('\0') = 4;
5706 COST_TABLE ('\n') = 2;
5707 COST_TABLE ('\f') = 1;
5708 COST_TABLE ('\v') = 1;
5709 COST_TABLE ('\b') = 1;
5710 }
5712 /* See if all the case expressions look like text. It is text if the
5713 lowest constant is >= -1 and the highest constant is <= 127. Do all comparisons
5714 as signed arithmetic since we don't want to ever access cost_table with a
5715 value less than -1. Also check that none of the constants in a range
5716 are strange control characters. */
5718 for (n = node; n; n = n->right)
5719 {
5720 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5721 return 0;
5723 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5724 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5725 if (COST_TABLE (i) < 0)
5726 return 0;
5727 }
5729 /* All interesting values are within the range of interesting
5730 ASCII characters. */
5731 return 1;
5732 }
5734 /* Scan an ordered list of case nodes
5735 combining those with consecutive values or ranges.
5737 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5739 static void
5740 group_case_nodes (head)
5741 case_node_ptr head;
5742 {
5743 case_node_ptr node = head;
5745 while (node)
5746 {
5747 rtx lb = next_real_insn (label_rtx (node->code_label));
5748 rtx lb2;
5749 case_node_ptr np = node;
5751 /* Try to group the successors of NODE with NODE. */
5752 while (((np = np->right) != 0)
5753 /* Do they jump to the same place? */
5754 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5755 || (lb != 0 && lb2 != 0
5756 && simplejump_p (lb)
5757 && simplejump_p (lb2)
5758 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5759 SET_SRC (PATTERN (lb2)))))
5760 /* Are their ranges consecutive? */
5761 && tree_int_cst_equal (np->low,
5762 fold (build (PLUS_EXPR,
5763 TREE_TYPE (node->high),
5764 node->high,
5765 integer_one_node)))
5766 /* An overflow is not consecutive. */
5767 && tree_int_cst_lt (node->high,
5768 fold (build (PLUS_EXPR,
5769 TREE_TYPE (node->high),
5770 node->high,
5771 integer_one_node))))
5773 node->high = np->high;
5775 /* NP is the first node after NODE which can't be grouped with it.
5776 Delete the nodes in between, and move on to that node. */
5777 node->right = np;
5778 node = np;
5779 }
5780 }
5782 /* Take an ordered list of case nodes
5783 and transform them into a near optimal binary tree,
5784 on the assumption that any target code selection value is as
5785 likely as any other.
5787 The transformation is performed by splitting the ordered
5788 list into two equal sections plus a pivot. The parts are
5789 then attached to the pivot as left and right branches. Each
5790 branch is then transformed recursively. */
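/* Example (added for exposition): the chain 1 -> 2 -> ... -> 7 of
   single values, with no cost table, is split at 4, giving

           4
          / \
         2   6
        / \ / \
       1  3 5  7

   after the two halves are balanced recursively. */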
5792 static void
5793 balance_case_nodes (head, parent)
5794 case_node_ptr *head;
5795 case_node_ptr parent;
5796 {
5797 case_node_ptr np;
5799 np = *head;
5800 if (np)
5801 {
5802 int cost = 0;
5803 int i = 0;
5804 int ranges = 0;
5805 case_node_ptr *npp;
5806 case_node_ptr left;
5808 /* Count the number of entries on branch. Also count the ranges. */
5810 while (np)
5811 {
5812 if (!tree_int_cst_equal (np->low, np->high))
5813 {
5814 ranges++;
5815 if (use_cost_table)
5816 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5817 }
5819 if (use_cost_table)
5820 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5822 i++;
5823 np = np->right;
5824 }
5826 if (i > 2)
5827 {
5828 /* Split this list if it is long enough for that to help. */
5829 npp = head;
5830 left = *npp;
5831 if (use_cost_table)
5832 {
5833 /* Find the place in the list that bisects the list's total cost;
5834 here I gets half the total cost. */
5835 int n_moved = 0;
5836 i = (cost + 1) / 2;
5837 while (1)
5838 {
5839 /* Skip nodes while their cost does not reach that amount. */
5840 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5841 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5842 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5843 if (i <= 0)
5844 break;
5845 npp = &(*npp)->right;
5846 n_moved += 1;
5847 }
5848 if (n_moved == 0)
5849 {
5850 /* Leave this branch lopsided, but optimize left-hand
5851 side and fill in `parent' fields for right-hand side. */
5852 np = *head;
5853 np->parent = parent;
5854 balance_case_nodes (&np->left, np);
5855 for (; np->right; np = np->right)
5856 np->right->parent = np;
5857 return;
5858 }
5859 }
5860 /* If there are just three nodes, split at the middle one. */
5861 else if (i == 3)
5862 npp = &(*npp)->right;
5863 else
5864 {
5865 /* Find the place in the list that bisects the list's total cost,
5866 where ranges count as 2;
5867 here I gets half the total cost. */
5868 i = (i + ranges + 1) / 2;
5869 while (1)
5870 {
5871 /* Skip nodes while their cost does not reach that amount. */
5872 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5873 i--;
5874 i--;
5875 if (i <= 0)
5876 break;
5877 npp = &(*npp)->right;
5878 }
5879 }
5880 *head = np = *npp;
5881 *npp = 0;
5882 np->parent = parent;
5883 np->left = left;
5885 /* Optimize each of the two split parts. */
5886 balance_case_nodes (&np->left, np);
5887 balance_case_nodes (&np->right, np);
5888 }
5889 else
5890 {
5891 /* Else leave this branch as one level,
5892 but fill in `parent' fields. */
5893 np = *head;
5894 np->parent = parent;
5895 for (; np->right; np = np->right)
5896 np->right->parent = np;
5897 }
5898 }
5899 }
5901 /* Search the parent sections of the case node tree
5902 to see if a test for the lower bound of NODE would be redundant.
5903 INDEX_TYPE is the type of the index expression.
5905 The instructions to generate the case decision tree are
5906 output in the same order as nodes are processed so it is
5907 known that if a parent node checks the range of the current
5908 node minus one that the current node is bounded at its lower
5909 span. Thus the test would be redundant. */
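/* Example (added for exposition): if a parent node handles case 50 and
   control reached us on its high side, the index is already known to be
   at least 51, so a node whose low bound is 51 need not test it
   again. */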
5911 static int
5912 node_has_low_bound (node, index_type)
5913 case_node_ptr node;
5914 tree index_type;
5915 {
5916 tree low_minus_one;
5917 case_node_ptr pnode;
5919 /* If the lower bound of this node is the lowest value in the index type,
5920 we need not test it. */
5922 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5923 return 1;
5925 /* If this node has a left branch, the value at the left must be less
5926 than that at this node, so it cannot be bounded at the bottom and
5927 we need not bother testing any further. */
5929 if (node->left)
5930 return 0;
5932 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5933 node->low, integer_one_node));
5935 /* If the subtraction above overflowed, we can't verify anything.
5936 Otherwise, look for a parent that tests our value - 1. */
5938 if (! tree_int_cst_lt (low_minus_one, node->low))
5939 return 0;
5941 for (pnode = node->parent; pnode; pnode = pnode->parent)
5942 if (tree_int_cst_equal (low_minus_one, pnode->high))
5943 return 1;
5945 return 0;
5946 }
5948 /* Search the parent sections of the case node tree
5949 to see if a test for the upper bound of NODE would be redundant.
5950 INDEX_TYPE is the type of the index expression.
5952 The instructions to generate the case decision tree are
5953 output in the same order as nodes are processed so it is
5954 known that if a parent node checks the range of the current
5955 node plus one that the current node is bounded at its upper
5956 span. Thus the test would be redundant. */
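/* Example (added for exposition, mirroring the low-bound case): if a
   parent handles case 100 and we are on its low side, the index is at
   most 99, so a node whose high bound is 99 can skip its upper test. */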
5958 static int
5959 node_has_high_bound (node, index_type)
5960 case_node_ptr node;
5961 tree index_type;
5962 {
5963 tree high_plus_one;
5964 case_node_ptr pnode;
5966 /* If there is no upper bound, obviously no test is needed. */
5968 if (TYPE_MAX_VALUE (index_type) == NULL)
5969 return 1;
5971 /* If the upper bound of this node is the highest value in the type
5972 of the index expression, we need not test against it. */
5974 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5975 return 1;
5977 /* If this node has a right branch, the value at the right must be greater
5978 than that at this node, so it cannot be bounded at the top and
5979 we need not bother testing any further. */
5981 if (node->right)
5982 return 0;
5984 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5985 node->high, integer_one_node));
5987 /* If the addition above overflowed, we can't verify anything.
5988 Otherwise, look for a parent that tests our value + 1. */
5990 if (! tree_int_cst_lt (node->high, high_plus_one))
5991 return 0;
5993 for (pnode = node->parent; pnode; pnode = pnode->parent)
5994 if (tree_int_cst_equal (high_plus_one, pnode->low))
5995 return 1;
5997 return 0;
5998 }
6000 /* Search the parent sections of the
6001 case node tree to see if both tests for the upper and lower
6002 bounds of NODE would be redundant. */
6004 static int
6005 node_is_bounded (node, index_type)
6006 case_node_ptr node;
6007 tree index_type;
6008 {
6009 return (node_has_low_bound (node, index_type)
6010 && node_has_high_bound (node, index_type));
6011 }
6013 /* Emit an unconditional jump to LABEL unless it would be dead code. */
6015 static void
6016 emit_jump_if_reachable (label)
6017 rtx label;
6018 {
6019 if (GET_CODE (get_last_insn ()) != BARRIER)
6020 emit_jump (label);
6021 }
6023 /* Emit step-by-step code to select a case for the value of INDEX.
6024 The thus generated decision tree follows the form of the
6025 case-node binary tree NODE, whose nodes represent test conditions.
6026 INDEX_TYPE is the type of the index of the switch.
6028 Care is taken to prune redundant tests from the decision tree
6029 by detecting any boundary conditions already checked by
6030 emitted rtx. (See node_has_high_bound, node_has_low_bound
6031 and node_is_bounded, above.)
6033 Where the test conditions can be shown to be redundant we emit
6034 an unconditional jump to the target code. As a further
6035 optimization, the subordinates of a tree node are examined to
6036 check for bounded nodes. In this case conditional and/or
6037 unconditional jumps as a result of the boundary check for the
6038 current node are arranged to target the subordinate's associated
6039 code for out-of-bound conditions on the current node.
6041 We can assume that when control reaches the code generated here,
6042 the index value has already been compared with the parents
6043 of this node, and determined to be on the same side of each parent
6044 as this node is. Thus, if this node tests for the value 51,
6045 and a parent tested for 52, we don't need to consider
6046 the possibility of a value greater than 51. If another parent
6047 tests for the value 50, then this node need not test anything. */
6049 static void
6050 emit_case_nodes (index, node, default_label, index_type)
6051 rtx index;
6052 case_node_ptr node;
6053 rtx default_label;
6054 tree index_type;
6055 {
6056 /* If INDEX has an unsigned type, we must make unsigned branches. */
6057 int unsignedp = TREE_UNSIGNED (index_type);
6058 enum machine_mode mode = GET_MODE (index);
6059 enum machine_mode imode = TYPE_MODE (index_type);
6061 /* See if our parents have already tested everything for us.
6062 If they have, emit an unconditional jump for this node. */
6063 if (node_is_bounded (node, index_type))
6064 emit_jump (label_rtx (node->code_label));
6066 else if (tree_int_cst_equal (node->low, node->high))
6067 {
6068 /* Node is single valued. First see if the index expression matches
6069 this node and then check our children, if any. */
6071 do_jump_if_equal (index,
6072 convert_modes (mode, imode,
6073 expand_expr (node->low, NULL_RTX,
6074 VOIDmode, 0),
6075 unsignedp),
6076 label_rtx (node->code_label), unsignedp);
6078 if (node->right != 0 && node->left != 0)
6079 {
6080 /* This node has children on both sides.
6081 Dispatch to one side or the other
6082 by comparing the index value with this node's value.
6083 If one subtree is bounded, check that one first,
6084 so we can avoid real branches in the tree. */
6086 if (node_is_bounded (node->right, index_type))
6087 {
6088 emit_cmp_and_jump_insns (index,
6089 convert_modes
6090 (mode, imode,
6091 expand_expr (node->high, NULL_RTX,
6092 VOIDmode, 0),
6093 unsignedp),
6094 GT, NULL_RTX, mode, unsignedp, 0,
6095 label_rtx (node->right->code_label));
6096 emit_case_nodes (index, node->left, default_label, index_type);
6097 }
6099 else if (node_is_bounded (node->left, index_type))
6100 {
6101 emit_cmp_and_jump_insns (index,
6102 convert_modes
6103 (mode, imode,
6104 expand_expr (node->high, NULL_RTX,
6105 VOIDmode, 0),
6106 unsignedp),
6107 LT, NULL_RTX, mode, unsignedp, 0,
6108 label_rtx (node->left->code_label));
6109 emit_case_nodes (index, node->right, default_label, index_type);
6110 }
6112 else
6113 {
6114 /* Neither node is bounded. First distinguish the two sides;
6115 then emit the code for one side at a time. */
6117 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6119 /* See if the value is on the right. */
6120 emit_cmp_and_jump_insns (index,
6121 convert_modes
6122 (mode, imode,
6123 expand_expr (node->high, NULL_RTX,
6124 VOIDmode, 0),
6125 unsignedp),
6126 GT, NULL_RTX, mode, unsignedp, 0,
6127 label_rtx (test_label));
6129 /* Value must be on the left.
6130 Handle the left-hand subtree. */
6131 emit_case_nodes (index, node->left, default_label, index_type);
6132 /* If left-hand subtree does nothing,
6133 go to default. */
6134 emit_jump_if_reachable (default_label);
6136 /* Code branches here for the right-hand subtree. */
6137 expand_label (test_label);
6138 emit_case_nodes (index, node->right, default_label, index_type);
6139 }
6140 }
6142 else if (node->right != 0 && node->left == 0)
6143 {
6144 /* Here we have a right child but no left, so we issue a conditional
6145 branch to default and process the right child.
6147 Omit the conditional branch to default if all it would guard is a
6148 single right child; it costs too much space to save so little time. */
6150 if (node->right->right || node->right->left
6151 || !tree_int_cst_equal (node->right->low, node->right->high))
6152 {
6153 if (!node_has_low_bound (node, index_type))
6154 {
6155 emit_cmp_and_jump_insns (index,
6156 convert_modes
6157 (mode, imode,
6158 expand_expr (node->high, NULL_RTX,
6159 VOIDmode, 0),
6160 unsignedp),
6161 LT, NULL_RTX, mode, unsignedp, 0,
6162 default_label);
6163 }
6165 emit_case_nodes (index, node->right, default_label, index_type);
6166 }
6167 else
6168 /* We cannot process node->right normally
6169 since we haven't ruled out the numbers less than
6170 this node's value. So handle node->right explicitly. */
6171 do_jump_if_equal (index,
6172 convert_modes
6173 (mode, imode,
6174 expand_expr (node->right->low, NULL_RTX,
6175 VOIDmode, 0),
6176 unsignedp),
6177 label_rtx (node->right->code_label), unsignedp);
6178 }
6180 else if (node->right == 0 && node->left != 0)
6181 {
6182 /* Just one subtree, on the left. */
6183 if (node->left->left || node->left->right
6184 || !tree_int_cst_equal (node->left->low, node->left->high))
6185 {
6186 if (!node_has_high_bound (node, index_type))
6187 {
6188 emit_cmp_and_jump_insns (index,
6189 convert_modes
6190 (mode, imode,
6191 expand_expr (node->high, NULL_RTX,
6192 VOIDmode, 0),
6193 unsignedp),
6194 GT, NULL_RTX, mode, unsignedp, 0,
6195 default_label);
6196 }
6198 emit_case_nodes (index, node->left, default_label, index_type);
6199 }
6200 else
6201 /* We cannot process node->left normally
6202 since we haven't ruled out the numbers greater than
6203 this node's value. So handle node->left explicitly. */
6204 do_jump_if_equal (index,
6205 convert_modes
6206 (mode, imode,
6207 expand_expr (node->left->low, NULL_RTX,
6208 VOIDmode, 0),
6209 unsignedp),
6210 label_rtx (node->left->code_label), unsignedp);
6211 }
6212 }
6213 else
6214 {
6215 /* Node is a range. These cases are very similar to those for a single
6216 value, except that we do not start by testing whether this node
6217 is the one to branch to. */
6219 if (node->right != 0 && node->left != 0)
6220 {
6221 /* Node has subtrees on both sides.
6222 If the right-hand subtree is bounded,
6223 test for it first, since we can go straight there.
6224 Otherwise, we need to make a branch in the control structure,
6225 then handle the two subtrees. */
6226 tree test_label = 0;
6228 if (node_is_bounded (node->right, index_type))
6229 /* Right hand node is fully bounded so we can eliminate any
6230 testing and branch directly to the target code. */
6231 emit_cmp_and_jump_insns (index,
6232 convert_modes
6233 (mode, imode,
6234 expand_expr (node->high, NULL_RTX,
6235 VOIDmode, 0),
6236 unsignedp),
6237 GT, NULL_RTX, mode, unsignedp, 0,
6238 label_rtx (node->right->code_label));
6239 else
6240 {
6241 /* Right hand node requires testing.
6242 Branch to a label where we will handle it later. */
6244 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6245 emit_cmp_and_jump_insns (index,
6246 convert_modes
6247 (mode, imode,
6248 expand_expr (node->high, NULL_RTX,
6249 VOIDmode, 0),
6250 unsignedp),
6251 GT, NULL_RTX, mode, unsignedp, 0,
6252 label_rtx (test_label));
6253 }
6255 /* Value belongs to this node or to the left-hand subtree. */
6257 emit_cmp_and_jump_insns (index,
6258 convert_modes
6259 (mode, imode,
6260 expand_expr (node->low, NULL_RTX,
6261 VOIDmode, 0),
6262 unsignedp),
6263 GE, NULL_RTX, mode, unsignedp, 0,
6264 label_rtx (node->code_label));
6266 /* Handle the left-hand subtree. */
6267 emit_case_nodes (index, node->left, default_label, index_type);
6269 /* If right node had to be handled later, do that now. */
6271 if (test_label)
6272 {
6273 /* If the left-hand subtree fell through,
6274 don't let it fall into the right-hand subtree. */
6275 emit_jump_if_reachable (default_label);
6277 expand_label (test_label);
6278 emit_case_nodes (index, node->right, default_label, index_type);
6279 }
6280 }
6282 else if (node->right != 0 && node->left == 0)
6283 {
6284 /* Deal with values to the left of this node,
6285 if they are possible. */
6286 if (!node_has_low_bound (node, index_type))
6287 {
6288 emit_cmp_and_jump_insns (index,
6289 convert_modes
6290 (mode, imode,
6291 expand_expr (node->low, NULL_RTX,
6292 VOIDmode, 0),
6293 unsignedp),
6294 LT, NULL_RTX, mode, unsignedp, 0,
6295 default_label);
6296 }
6298 /* Value belongs to this node or to the right-hand subtree. */
6300 emit_cmp_and_jump_insns (index,
6301 convert_modes
6302 (mode, imode,
6303 expand_expr (node->high, NULL_RTX,
6304 VOIDmode, 0),
6305 unsignedp),
6306 LE, NULL_RTX, mode, unsignedp, 0,
6307 label_rtx (node->code_label));
6309 emit_case_nodes (index, node->right, default_label, index_type);
6310 }
6312 else if (node->right == 0 && node->left != 0)
6313 {
6314 /* Deal with values to the right of this node,
6315 if they are possible. */
6316 if (!node_has_high_bound (node, index_type))
6317 {
6318 emit_cmp_and_jump_insns (index,
6319 convert_modes
6320 (mode, imode,
6321 expand_expr (node->high, NULL_RTX,
6322 VOIDmode, 0),
6323 unsignedp),
6324 GT, NULL_RTX, mode, unsignedp, 0,
6325 default_label);
6326 }
6328 /* Value belongs to this node or to the left-hand subtree. */
6330 emit_cmp_and_jump_insns (index,
6331 convert_modes
6332 (mode, imode,
6333 expand_expr (node->low, NULL_RTX,
6334 VOIDmode, 0),
6335 unsignedp),
6336 GE, NULL_RTX, mode, unsignedp, 0,
6337 label_rtx (node->code_label));
6339 emit_case_nodes (index, node->left, default_label, index_type);
6340 }
6342 else
6343 {
6344 /* Node has no children so we check low and high bounds to remove
6345 redundant tests. Only one of the bounds can exist,
6346 since otherwise this node is bounded--a case tested already. */
6347 int high_bound = node_has_high_bound (node, index_type);
6348 int low_bound = node_has_low_bound (node, index_type);
6350 if (!high_bound && low_bound)
6351 {
6352 emit_cmp_and_jump_insns (index,
6353 convert_modes
6354 (mode, imode,
6355 expand_expr (node->high, NULL_RTX,
6356 VOIDmode, 0),
6357 unsignedp),
6358 GT, NULL_RTX, mode, unsignedp, 0,
6359 default_label);
6360 }
6362 else if (!low_bound && high_bound)
6363 {
6364 emit_cmp_and_jump_insns (index,
6365 convert_modes
6366 (mode, imode,
6367 expand_expr (node->low, NULL_RTX,
6368 VOIDmode, 0),
6369 unsignedp),
6370 LT, NULL_RTX, mode, unsignedp, 0,
6371 default_label);
6372 }
6373 else if (!low_bound && !high_bound)
6374 {
6375 /* Widen LOW and HIGH to the same width as INDEX. */
6376 tree type = type_for_mode (mode, unsignedp);
6377 tree low = build1 (CONVERT_EXPR, type, node->low);
6378 tree high = build1 (CONVERT_EXPR, type, node->high);
6379 rtx low_rtx, new_index, new_bound;
6381 /* Instead of doing two branches, emit one unsigned branch for
6382 (index-low) > (high-low). */
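/* For instance (added illustration): with LOW == 5 and HIGH == 10 the
   single test is (unsigned) (index - 5) > 5; an index below 5 wraps
   around to a huge unsigned value, so the one branch rejects both
   out-of-range directions. A stand-alone model of the trick, kept out
   of the build with #if 0; the name `in_range' is ours, not GCC's: */
#if 0
static int
in_range (long i, long low, long high)
{
  /* True exactly when low <= i <= high, using one unsigned compare. */
  return (unsigned long) (i - low) <= (unsigned long) (high - low);
}
#endif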
6383 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6384 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6385 NULL_RTX, unsignedp,
6386 OPTAB_WIDEN);
6387 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6388 high, low)),
6389 NULL_RTX, mode, 0);
6391 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6392 mode, 1, 0, default_label);
6393 }
6395 emit_jump (label_rtx (node->code_label));