PR c++/16115
[official-gcc.git] / gcc / stmt.c
blob6f7382d3df8c8a10f674661c16772d396aff2072
1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "tm_p.h"
44 #include "flags.h"
45 #include "except.h"
46 #include "function.h"
47 #include "insn-config.h"
48 #include "expr.h"
49 #include "libfuncs.h"
50 #include "hard-reg-set.h"
51 #include "loop.h"
52 #include "recog.h"
53 #include "machmode.h"
54 #include "toplev.h"
55 #include "output.h"
56 #include "ggc.h"
57 #include "langhooks.h"
58 #include "predict.h"
59 #include "optabs.h"
60 #include "target.h"
61 #include "regs.h"
63 /* Functions and data structures for expanding case statements. */
65 /* Case label structure, used to hold info on labels within case
66 statements. We handle "range" labels; for a single-value label
67 as in C, the high and low limits are the same.
69 An AVL tree of case nodes is initially created, and later transformed
70 to a list linked via the RIGHT fields in the nodes. Nodes with
71 higher case values are later in the list.
73 Switch statements can be output in one of two forms. A branch table
74 is used if there are more than a few labels and the labels are dense
75 within the range between the smallest and largest case value. If a
76 branch table is used, no further manipulations are done with the case
77 node chain.
79 The alternative to the use of a branch table is to generate a series
80 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
81 and PARENT fields to hold a binary tree. Initially the tree is
82 totally unbalanced, with everything on the right. We balance the tree
83 with nodes on the left having lower case values than the parent
84 and nodes on the right having higher values. We then output the tree
85 in order. */
87 struct case_node GTY(())
89 struct case_node *left; /* Left son in binary tree */
90 struct case_node *right; /* Right son in binary tree; also node chain */
91 struct case_node *parent; /* Parent of node in binary tree */
92 tree low; /* Lowest index value for this label */
93 tree high; /* Highest index value for this label */
94 tree code_label; /* Label to jump to when node matches */
95 int balance;
98 typedef struct case_node case_node;
99 typedef struct case_node *case_node_ptr;
101 /* These are used by estimate_case_costs and balance_case_nodes. */
103 /* This must be a signed type, and non-ANSI compilers lack signed char. */
104 static short cost_table_[129];
105 static int use_cost_table;
106 static int cost_table_initialized;
108 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
109 is unsigned. */
110 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `cond_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting GTY(())
{
  struct nesting *all;		/* Chain of every construct, innermost
				   first.  */
  struct nesting *next;		/* Next construct of the same kind.  */
  int depth;			/* Position on `nesting_stack'.  */
  rtx exit_label;		/* Non-null iff visible to `break'.  */
  enum nesting_desc {
    COND_NESTING,
    BLOCK_NESTING,
    CASE_NESTING
  } desc;
  union nesting_u
    {
      /* For conds (if-then and if-then-else statements).  */
      struct nesting_cond
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} GTY ((tag ("COND_NESTING"))) cond;
      /* For variable binding contours.  */
      struct nesting_block
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block, if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	} GTY ((tag ("BLOCK_NESTING"))) block;
      /* For switch (C) or case (Pascal) statements.  */
      struct nesting_case
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	} GTY ((tag ("CASE_NESTING"))) case_stmt;
    } GTY ((desc ("%1.desc"))) data;
};
/* Allocate and return a new `struct nesting' from GC storage.  */

#define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all; }			\
     while (this != target); } while (0)
220 struct stmt_status GTY(())
222 /* Chain of all pending binding contours. */
223 struct nesting * x_block_stack;
225 /* If any new stacks are added here, add them to POPSTACKS too. */
227 /* Chain of all pending conditional statements. */
228 struct nesting * x_cond_stack;
230 /* Chain of all pending case or switch statements. */
231 struct nesting * x_case_stack;
233 /* Separate chain including all of the above,
234 chained through the `all' field. */
235 struct nesting * x_nesting_stack;
237 /* Number of entries on nesting_stack now. */
238 int x_nesting_depth;
240 /* Number of binding contours started so far in this function. */
241 int x_block_start_count;
243 /* Location of last line-number note, whether we actually
244 emitted it or not. */
245 location_t x_emit_locus;
248 #define block_stack (cfun->stmt->x_block_stack)
249 #define cond_stack (cfun->stmt->x_cond_stack)
250 #define case_stack (cfun->stmt->x_case_stack)
251 #define nesting_stack (cfun->stmt->x_nesting_stack)
252 #define nesting_depth (cfun->stmt->x_nesting_depth)
253 #define current_block_start_count (cfun->stmt->x_block_start_count)
254 #define emit_locus (cfun->stmt->x_emit_locus)
256 static int n_occurrences (int, const char *);
257 static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
258 static void expand_nl_goto_receiver (void);
259 static bool check_operand_nalternatives (tree, tree);
260 static bool check_unique_operand_names (tree, tree);
261 static char *resolve_operand_name_1 (char *, tree, tree);
262 static void expand_null_return_1 (void);
263 static enum br_predictor return_prediction (rtx);
264 static rtx shift_return_value (rtx);
265 static void expand_value_return (rtx);
266 static void do_jump_if_equal (rtx, rtx, rtx, int);
267 static int estimate_case_costs (case_node_ptr);
268 static bool same_case_target_p (rtx, rtx);
269 static bool lshift_cheap_p (void);
270 static int case_bit_test_cmp (const void *, const void *);
271 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
272 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
273 static int node_has_low_bound (case_node_ptr, tree);
274 static int node_has_high_bound (case_node_ptr, tree);
275 static int node_is_bounded (case_node_ptr, tree);
276 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
277 static struct case_node *case_tree2list (case_node *, case_node *);
279 void
280 init_stmt_for_function (void)
282 cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
285 /* Record the current file and line. Called from emit_line_note. */
287 void
288 set_file_and_line_for_stmt (location_t location)
290 /* If we're outputting an inline function, and we add a line note,
291 there may be no CFUN->STMT information. So, there's no need to
292 update it. */
293 if (cfun->stmt)
294 emit_locus = location;
297 /* Emit a no-op instruction. */
299 void
300 emit_nop (void)
302 rtx last_insn;
304 last_insn = get_last_insn ();
305 if (!optimize
306 && (LABEL_P (last_insn)
307 || (NOTE_P (last_insn)
308 && prev_real_insn (last_insn) == 0)))
309 emit_insn (gen_nop ());
312 /* Return the rtx-label that corresponds to a LABEL_DECL,
313 creating it if necessary. */
316 label_rtx (tree label)
318 if (TREE_CODE (label) != LABEL_DECL)
319 abort ();
321 if (!DECL_RTL_SET_P (label))
323 rtx r = gen_label_rtx ();
324 SET_DECL_RTL (label, r);
325 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
326 LABEL_PRESERVE_P (r) = 1;
329 return DECL_RTL (label);
332 /* As above, but also put it on the forced-reference list of the
333 function that contains it. */
335 force_label_rtx (tree label)
337 rtx ref = label_rtx (label);
338 tree function = decl_function_context (label);
339 struct function *p;
341 if (!function)
342 abort ();
344 if (function != current_function_decl)
345 p = find_function_data (function);
346 else
347 p = cfun;
349 p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
350 p->expr->x_forced_labels);
351 return ref;
354 /* Add an unconditional jump to LABEL as the next sequential instruction. */
356 void
357 emit_jump (rtx label)
359 do_pending_stack_adjust ();
360 emit_jump_insn (gen_jump (label));
361 emit_barrier ();
364 /* Emit code to jump to the address
365 specified by the pointer expression EXP. */
367 void
368 expand_computed_goto (tree exp)
370 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
372 x = convert_memory_address (Pmode, x);
374 do_pending_stack_adjust ();
375 emit_indirect_jump (x);
378 /* Handle goto statements and the labels that they can go to. */
380 /* Specify the location in the RTL code of a label LABEL,
381 which is a LABEL_DECL tree node.
383 This is used for the kind of label that the user can jump to with a
384 goto statement, and for alternatives of a switch or case statement.
385 RTL labels generated for loops and conditionals don't go through here;
386 they are generated directly at the RTL level, by other functions below.
388 Note that this has nothing to do with defining label *names*.
389 Languages vary in how they do that and what that even means. */
391 void
392 expand_label (tree label)
394 rtx label_r = label_rtx (label);
396 do_pending_stack_adjust ();
397 emit_label (label_r);
398 if (DECL_NAME (label))
399 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
401 if (DECL_NONLOCAL (label))
403 expand_nl_goto_receiver ();
404 nonlocal_goto_handler_labels
405 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
406 nonlocal_goto_handler_labels);
409 if (FORCED_LABEL (label))
410 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
412 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
413 maybe_set_first_label_num (label_r);
416 /* Generate RTL code for a `goto' statement with target label LABEL.
417 LABEL should be a LABEL_DECL tree node that was or will later be
418 defined with `expand_label'. */
420 void
421 expand_goto (tree label)
423 #ifdef ENABLE_CHECKING
424 /* Check for a nonlocal goto to a containing function. Should have
425 gotten translated to __builtin_nonlocal_goto. */
426 tree context = decl_function_context (label);
427 if (context != 0 && context != current_function_decl)
428 abort ();
429 #endif
431 emit_jump (label_rtx (label));
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int count = 0;

  for (; *s != '\0'; s++)
    if (*s == c)
      count++;

  return count;
}
444 /* Generate RTL for an asm statement (explicit assembler code).
445 STRING is a STRING_CST node containing the assembler code text,
446 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
447 insn is volatile; don't optimize it. */
449 void
450 expand_asm (tree string, int vol)
452 rtx body;
454 if (TREE_CODE (string) == ADDR_EXPR)
455 string = TREE_OPERAND (string, 0);
457 body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));
459 MEM_VOLATILE_P (body) = vol;
461 emit_insn (body);
464 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
465 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
466 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
467 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
468 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
469 constraint allows the use of a register operand. And, *IS_INOUT
470 will be true if the operand is read-write, i.e., if it is used as
471 an input as well as an output. If *CONSTRAINT_P is not in
472 canonical form, it will be made canonical. (Note that `+' will be
473 replaced with `=' as part of this process.)
475 Returns TRUE if all went well; FALSE if an error occurred. */
477 bool
478 parse_output_constraint (const char **constraint_p, int operand_num,
479 int ninputs, int noutputs, bool *allows_mem,
480 bool *allows_reg, bool *is_inout)
482 const char *constraint = *constraint_p;
483 const char *p;
485 /* Assume the constraint doesn't allow the use of either a register
486 or memory. */
487 *allows_mem = false;
488 *allows_reg = false;
490 /* Allow the `=' or `+' to not be at the beginning of the string,
491 since it wasn't explicitly documented that way, and there is a
492 large body of code that puts it last. Swap the character to
493 the front, so as not to uglify any place else. */
494 p = strchr (constraint, '=');
495 if (!p)
496 p = strchr (constraint, '+');
498 /* If the string doesn't contain an `=', issue an error
499 message. */
500 if (!p)
502 error ("output operand constraint lacks `='");
503 return false;
506 /* If the constraint begins with `+', then the operand is both read
507 from and written to. */
508 *is_inout = (*p == '+');
510 /* Canonicalize the output constraint so that it begins with `='. */
511 if (p != constraint || is_inout)
513 char *buf;
514 size_t c_len = strlen (constraint);
516 if (p != constraint)
517 warning ("output constraint `%c' for operand %d is not at the beginning",
518 *p, operand_num);
520 /* Make a copy of the constraint. */
521 buf = alloca (c_len + 1);
522 strcpy (buf, constraint);
523 /* Swap the first character and the `=' or `+'. */
524 buf[p - constraint] = buf[0];
525 /* Make sure the first character is an `='. (Until we do this,
526 it might be a `+'.) */
527 buf[0] = '=';
528 /* Replace the constraint with the canonicalized string. */
529 *constraint_p = ggc_alloc_string (buf, c_len);
530 constraint = *constraint_p;
533 /* Loop through the constraint string. */
534 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
535 switch (*p)
537 case '+':
538 case '=':
539 error ("operand constraint contains incorrectly positioned '+' or '='");
540 return false;
542 case '%':
543 if (operand_num + 1 == ninputs + noutputs)
545 error ("`%%' constraint used with last operand");
546 return false;
548 break;
550 case 'V': case 'm': case 'o':
551 *allows_mem = true;
552 break;
554 case '?': case '!': case '*': case '&': case '#':
555 case 'E': case 'F': case 'G': case 'H':
556 case 's': case 'i': case 'n':
557 case 'I': case 'J': case 'K': case 'L': case 'M':
558 case 'N': case 'O': case 'P': case ',':
559 break;
561 case '0': case '1': case '2': case '3': case '4':
562 case '5': case '6': case '7': case '8': case '9':
563 case '[':
564 error ("matching constraint not valid in output operand");
565 return false;
567 case '<': case '>':
568 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
569 excepting those that expand_call created. So match memory
570 and hope. */
571 *allows_mem = true;
572 break;
574 case 'g': case 'X':
575 *allows_reg = true;
576 *allows_mem = true;
577 break;
579 case 'p': case 'r':
580 *allows_reg = true;
581 break;
583 default:
584 if (!ISALPHA (*p))
585 break;
586 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
587 *allows_reg = true;
588 #ifdef EXTRA_CONSTRAINT_STR
589 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
590 *allows_reg = true;
591 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
592 *allows_mem = true;
593 else
595 /* Otherwise we can't assume anything about the nature of
596 the constraint except that it isn't purely registers.
597 Treat it like "g" and hope for the best. */
598 *allows_reg = true;
599 *allows_mem = true;
601 #endif
602 break;
605 return true;
608 /* Similar, but for input constraints. */
610 bool
611 parse_input_constraint (const char **constraint_p, int input_num,
612 int ninputs, int noutputs, int ninout,
613 const char * const * constraints,
614 bool *allows_mem, bool *allows_reg)
616 const char *constraint = *constraint_p;
617 const char *orig_constraint = constraint;
618 size_t c_len = strlen (constraint);
619 size_t j;
620 bool saw_match = false;
622 /* Assume the constraint doesn't allow the use of either
623 a register or memory. */
624 *allows_mem = false;
625 *allows_reg = false;
627 /* Make sure constraint has neither `=', `+', nor '&'. */
629 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
630 switch (constraint[j])
632 case '+': case '=': case '&':
633 if (constraint == orig_constraint)
635 error ("input operand constraint contains `%c'", constraint[j]);
636 return false;
638 break;
640 case '%':
641 if (constraint == orig_constraint
642 && input_num + 1 == ninputs - ninout)
644 error ("`%%' constraint used with last operand");
645 return false;
647 break;
649 case 'V': case 'm': case 'o':
650 *allows_mem = true;
651 break;
653 case '<': case '>':
654 case '?': case '!': case '*': case '#':
655 case 'E': case 'F': case 'G': case 'H':
656 case 's': case 'i': case 'n':
657 case 'I': case 'J': case 'K': case 'L': case 'M':
658 case 'N': case 'O': case 'P': case ',':
659 break;
661 /* Whether or not a numeric constraint allows a register is
662 decided by the matching constraint, and so there is no need
663 to do anything special with them. We must handle them in
664 the default case, so that we don't unnecessarily force
665 operands to memory. */
666 case '0': case '1': case '2': case '3': case '4':
667 case '5': case '6': case '7': case '8': case '9':
669 char *end;
670 unsigned long match;
672 saw_match = true;
674 match = strtoul (constraint + j, &end, 10);
675 if (match >= (unsigned long) noutputs)
677 error ("matching constraint references invalid operand number");
678 return false;
681 /* Try and find the real constraint for this dup. Only do this
682 if the matching constraint is the only alternative. */
683 if (*end == '\0'
684 && (j == 0 || (j == 1 && constraint[0] == '%')))
686 constraint = constraints[match];
687 *constraint_p = constraint;
688 c_len = strlen (constraint);
689 j = 0;
690 /* ??? At the end of the loop, we will skip the first part of
691 the matched constraint. This assumes not only that the
692 other constraint is an output constraint, but also that
693 the '=' or '+' come first. */
694 break;
696 else
697 j = end - constraint;
698 /* Anticipate increment at end of loop. */
699 j--;
701 /* Fall through. */
703 case 'p': case 'r':
704 *allows_reg = true;
705 break;
707 case 'g': case 'X':
708 *allows_reg = true;
709 *allows_mem = true;
710 break;
712 default:
713 if (! ISALPHA (constraint[j]))
715 error ("invalid punctuation `%c' in constraint", constraint[j]);
716 return false;
718 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
719 != NO_REGS)
720 *allows_reg = true;
721 #ifdef EXTRA_CONSTRAINT_STR
722 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
723 *allows_reg = true;
724 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
725 *allows_mem = true;
726 else
728 /* Otherwise we can't assume anything about the nature of
729 the constraint except that it isn't purely registers.
730 Treat it like "g" and hope for the best. */
731 *allows_reg = true;
732 *allows_mem = true;
734 #endif
735 break;
738 if (saw_match && !*allows_reg)
739 warning ("matching constraint does not allow a register");
741 return true;
744 /* INPUT is one of the input operands from EXPR, an ASM_EXPR. Returns true
745 if it is an operand which must be passed in memory (i.e. an "m"
746 constraint), false otherwise. */
748 bool
749 asm_op_is_mem_input (tree input, tree expr)
751 const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (input)));
752 tree outputs = ASM_OUTPUTS (expr);
753 int noutputs = list_length (outputs);
754 const char **constraints
755 = (const char **) alloca ((noutputs) * sizeof (const char *));
756 int i = 0;
757 bool allows_mem, allows_reg;
758 tree t;
760 /* Collect output constraints. */
761 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
762 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
764 /* We pass 0 for input_num, ninputs and ninout; they are only used for
765 error checking which will be done at expand time. */
766 parse_input_constraint (&constraint, 0, 0, noutputs, 0, constraints,
767 &allows_mem, &allows_reg);
768 return (!allows_reg && allows_mem);
771 /* Check for overlap between registers marked in CLOBBERED_REGS and
772 anything inappropriate in DECL. Emit error and return TRUE for error,
773 FALSE for ok. */
775 static bool
776 decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
778 /* Conflicts between asm-declared register variables and the clobber
779 list are not allowed. */
780 if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
781 && DECL_REGISTER (decl)
782 && REG_P (DECL_RTL (decl))
783 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
785 rtx reg = DECL_RTL (decl);
786 unsigned int regno;
788 for (regno = REGNO (reg);
789 regno < (REGNO (reg)
790 + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
791 regno++)
792 if (TEST_HARD_REG_BIT (clobbered_regs, regno))
794 error ("asm-specifier for variable `%s' conflicts with asm clobber list",
795 IDENTIFIER_POINTER (DECL_NAME (decl)));
797 /* Reset registerness to stop multiple errors emitted for a
798 single variable. */
799 DECL_REGISTER (decl) = 0;
800 return true;
803 return false;
806 /* Generate RTL for an asm statement with arguments.
807 STRING is the instruction template.
808 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
809 Each output or input has an expression in the TREE_VALUE and
810 and a tree list in TREE_PURPOSE which in turn contains a constraint
811 name in TREE_VALUE (or NULL_TREE) and a constraint string
812 in TREE_PURPOSE.
813 CLOBBERS is a list of STRING_CST nodes each naming a hard register
814 that is clobbered by this insn.
816 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
817 Some elements of OUTPUTS may be replaced with trees representing temporary
818 values. The caller should copy those temporary values to the originally
819 specified lvalues.
821 VOL nonzero means the insn is volatile; don't optimize it. */
823 void
824 expand_asm_operands (tree string, tree outputs, tree inputs,
825 tree clobbers, int vol, location_t locus)
827 rtvec argvec, constraintvec;
828 rtx body;
829 int ninputs = list_length (inputs);
830 int noutputs = list_length (outputs);
831 int ninout;
832 int nclobbers;
833 HARD_REG_SET clobbered_regs;
834 int clobber_conflict_found = 0;
835 tree tail;
836 tree t;
837 int i;
838 /* Vector of RTX's of evaluated output operands. */
839 rtx *output_rtx = alloca (noutputs * sizeof (rtx));
840 int *inout_opnum = alloca (noutputs * sizeof (int));
841 rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
842 enum machine_mode *inout_mode
843 = alloca (noutputs * sizeof (enum machine_mode));
844 const char **constraints
845 = alloca ((noutputs + ninputs) * sizeof (const char *));
846 int old_generating_concat_p = generating_concat_p;
848 /* An ASM with no outputs needs to be treated as volatile, for now. */
849 if (noutputs == 0)
850 vol = 1;
852 if (! check_operand_nalternatives (outputs, inputs))
853 return;
855 string = resolve_asm_operand_names (string, outputs, inputs);
857 /* Collect constraints. */
858 i = 0;
859 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
860 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
861 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
862 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
864 /* Sometimes we wish to automatically clobber registers across an asm.
865 Case in point is when the i386 backend moved from cc0 to a hard reg --
866 maintaining source-level compatibility means automatically clobbering
867 the flags register. */
868 clobbers = targetm.md_asm_clobbers (clobbers);
870 /* Count the number of meaningful clobbered registers, ignoring what
871 we would ignore later. */
872 nclobbers = 0;
873 CLEAR_HARD_REG_SET (clobbered_regs);
874 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
876 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
878 i = decode_reg_name (regname);
879 if (i >= 0 || i == -4)
880 ++nclobbers;
881 else if (i == -2)
882 error ("unknown register name `%s' in `asm'", regname);
884 /* Mark clobbered registers. */
885 if (i >= 0)
887 /* Clobbering the PIC register is an error */
888 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
890 error ("PIC register `%s' clobbered in `asm'", regname);
891 return;
894 SET_HARD_REG_BIT (clobbered_regs, i);
898 /* First pass over inputs and outputs checks validity and sets
899 mark_addressable if needed. */
901 ninout = 0;
902 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
904 tree val = TREE_VALUE (tail);
905 tree type = TREE_TYPE (val);
906 const char *constraint;
907 bool is_inout;
908 bool allows_reg;
909 bool allows_mem;
911 /* If there's an erroneous arg, emit no insn. */
912 if (type == error_mark_node)
913 return;
915 /* Try to parse the output constraint. If that fails, there's
916 no point in going further. */
917 constraint = constraints[i];
918 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
919 &allows_mem, &allows_reg, &is_inout))
920 return;
922 if (! allows_reg
923 && (allows_mem
924 || is_inout
925 || (DECL_P (val)
926 && REG_P (DECL_RTL (val))
927 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
928 lang_hooks.mark_addressable (val);
930 if (is_inout)
931 ninout++;
934 ninputs += ninout;
935 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
937 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
938 return;
941 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
943 bool allows_reg, allows_mem;
944 const char *constraint;
946 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
947 would get VOIDmode and that could cause a crash in reload. */
948 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
949 return;
951 constraint = constraints[i + noutputs];
952 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
953 constraints, &allows_mem, &allows_reg))
954 return;
956 if (! allows_reg && allows_mem)
957 lang_hooks.mark_addressable (TREE_VALUE (tail));
960 /* Second pass evaluates arguments. */
962 ninout = 0;
963 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
965 tree val = TREE_VALUE (tail);
966 tree type = TREE_TYPE (val);
967 bool is_inout;
968 bool allows_reg;
969 bool allows_mem;
970 rtx op;
972 if (!parse_output_constraint (&constraints[i], i, ninputs,
973 noutputs, &allows_mem, &allows_reg,
974 &is_inout))
975 abort ();
977 /* If an output operand is not a decl or indirect ref and our constraint
978 allows a register, make a temporary to act as an intermediate.
979 Make the asm insn write into that, then our caller will copy it to
980 the real output operand. Likewise for promoted variables. */
982 generating_concat_p = 0;
984 real_output_rtx[i] = NULL_RTX;
985 if ((TREE_CODE (val) == INDIRECT_REF
986 && allows_mem)
987 || (DECL_P (val)
988 && (allows_mem || REG_P (DECL_RTL (val)))
989 && ! (REG_P (DECL_RTL (val))
990 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
991 || ! allows_reg
992 || is_inout)
994 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
995 if (MEM_P (op))
996 op = validize_mem (op);
998 if (! allows_reg && !MEM_P (op))
999 error ("output number %d not directly addressable", i);
1000 if ((! allows_mem && MEM_P (op))
1001 || GET_CODE (op) == CONCAT)
1003 real_output_rtx[i] = op;
1004 op = gen_reg_rtx (GET_MODE (op));
1005 if (is_inout)
1006 emit_move_insn (op, real_output_rtx[i]);
1009 else
1011 op = assign_temp (type, 0, 0, 1);
1012 op = validize_mem (op);
1013 TREE_VALUE (tail) = make_tree (type, op);
1015 output_rtx[i] = op;
1017 generating_concat_p = old_generating_concat_p;
1019 if (is_inout)
1021 inout_mode[ninout] = TYPE_MODE (type);
1022 inout_opnum[ninout++] = i;
1025 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1026 clobber_conflict_found = 1;
1029 /* Make vectors for the expression-rtx, constraint strings,
1030 and named operands. */
1032 argvec = rtvec_alloc (ninputs);
1033 constraintvec = rtvec_alloc (ninputs);
1035 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1036 : GET_MODE (output_rtx[0])),
1037 TREE_STRING_POINTER (string),
1038 empty_string, 0, argvec, constraintvec,
1039 locus);
1041 MEM_VOLATILE_P (body) = vol;
1043 /* Eval the inputs and put them into ARGVEC.
1044 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1046 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1048 bool allows_reg, allows_mem;
1049 const char *constraint;
1050 tree val, type;
1051 rtx op;
1053 constraint = constraints[i + noutputs];
1054 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1055 constraints, &allows_mem, &allows_reg))
1056 abort ();
1058 generating_concat_p = 0;
1060 val = TREE_VALUE (tail);
1061 type = TREE_TYPE (val);
1062 op = expand_expr (val, NULL_RTX, VOIDmode,
1063 (allows_mem && !allows_reg
1064 ? EXPAND_MEMORY : EXPAND_NORMAL));
1066 /* Never pass a CONCAT to an ASM. */
1067 if (GET_CODE (op) == CONCAT)
1068 op = force_reg (GET_MODE (op), op);
1069 else if (MEM_P (op))
1070 op = validize_mem (op);
1072 if (asm_operand_ok (op, constraint) <= 0)
1074 if (allows_reg)
1075 op = force_reg (TYPE_MODE (type), op);
1076 else if (!allows_mem)
1077 warning ("asm operand %d probably doesn't match constraints",
1078 i + noutputs);
1079 else if (MEM_P (op))
1081 /* We won't recognize either volatile memory or memory
1082 with a queued address as available a memory_operand
1083 at this point. Ignore it: clearly this *is* a memory. */
1085 else
1087 warning ("use of memory input without lvalue in "
1088 "asm operand %d is deprecated", i + noutputs);
1090 if (CONSTANT_P (op))
1092 rtx mem = force_const_mem (TYPE_MODE (type), op);
1093 if (mem)
1094 op = validize_mem (mem);
1095 else
1096 op = force_reg (TYPE_MODE (type), op);
1098 if (REG_P (op)
1099 || GET_CODE (op) == SUBREG
1100 || GET_CODE (op) == CONCAT)
1102 tree qual_type = build_qualified_type (type,
1103 (TYPE_QUALS (type)
1104 | TYPE_QUAL_CONST));
1105 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1106 memloc = validize_mem (memloc);
1107 emit_move_insn (memloc, op);
1108 op = memloc;
1113 generating_concat_p = old_generating_concat_p;
1114 ASM_OPERANDS_INPUT (body, i) = op;
1116 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1117 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1119 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1120 clobber_conflict_found = 1;
1123 /* Protect all the operands from the queue now that they have all been
1124 evaluated. */
1126 generating_concat_p = 0;
1128 /* For in-out operands, copy output rtx to input rtx. */
1129 for (i = 0; i < ninout; i++)
1131 int j = inout_opnum[i];
1132 char buffer[16];
1134 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1135 = output_rtx[j];
1137 sprintf (buffer, "%d", j);
1138 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1139 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
1142 generating_concat_p = old_generating_concat_p;
1144 /* Now, for each output, construct an rtx
1145 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1146 ARGVEC CONSTRAINTS OPNAMES))
1147 If there is more than one, put them inside a PARALLEL. */
1149 if (noutputs == 1 && nclobbers == 0)
1151 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1152 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1155 else if (noutputs == 0 && nclobbers == 0)
1157 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1158 emit_insn (body);
1161 else
1163 rtx obody = body;
1164 int num = noutputs;
1166 if (num == 0)
1167 num = 1;
1169 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1171 /* For each output operand, store a SET. */
1172 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1174 XVECEXP (body, 0, i)
1175 = gen_rtx_SET (VOIDmode,
1176 output_rtx[i],
1177 gen_rtx_ASM_OPERANDS
1178 (GET_MODE (output_rtx[i]),
1179 TREE_STRING_POINTER (string),
1180 constraints[i], i, argvec, constraintvec,
1181 locus));
1183 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1186 /* If there are no outputs (but there are some clobbers)
1187 store the bare ASM_OPERANDS into the PARALLEL. */
1189 if (i == 0)
1190 XVECEXP (body, 0, i++) = obody;
1192 /* Store (clobber REG) for each clobbered register specified. */
1194 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1196 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1197 int j = decode_reg_name (regname);
1198 rtx clobbered_reg;
1200 if (j < 0)
1202 if (j == -3) /* `cc', which is not a register */
1203 continue;
1205 if (j == -4) /* `memory', don't cache memory across asm */
1207 XVECEXP (body, 0, i++)
1208 = gen_rtx_CLOBBER (VOIDmode,
1209 gen_rtx_MEM
1210 (BLKmode,
1211 gen_rtx_SCRATCH (VOIDmode)));
1212 continue;
1215 /* Ignore unknown register, error already signaled. */
1216 continue;
1219 /* Use QImode since that's guaranteed to clobber just one reg. */
1220 clobbered_reg = gen_rtx_REG (QImode, j);
1222 /* Do sanity check for overlap between clobbers and respectively
1223 input and outputs that hasn't been handled. Such overlap
1224 should have been detected and reported above. */
1225 if (!clobber_conflict_found)
1227 int opno;
1229 /* We test the old body (obody) contents to avoid tripping
1230 over the under-construction body. */
1231 for (opno = 0; opno < noutputs; opno++)
1232 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1233 internal_error ("asm clobber conflict with output operand");
1235 for (opno = 0; opno < ninputs - ninout; opno++)
1236 if (reg_overlap_mentioned_p (clobbered_reg,
1237 ASM_OPERANDS_INPUT (obody, opno)))
1238 internal_error ("asm clobber conflict with input operand");
1241 XVECEXP (body, 0, i++)
1242 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1245 emit_insn (body);
1248 /* For any outputs that needed reloading into registers, spill them
1249 back to where they belong. */
1250 for (i = 0; i < noutputs; ++i)
1251 if (real_output_rtx[i])
1252 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1254 free_temp_slots ();
1257 void
1258 expand_asm_expr (tree exp)
1260 int noutputs, i;
1261 tree outputs, tail;
1262 tree *o;
1264 if (ASM_INPUT_P (exp))
1266 expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
1267 return;
1270 outputs = ASM_OUTPUTS (exp);
1271 noutputs = list_length (outputs);
1272 /* o[I] is the place that output number I should be written. */
1273 o = (tree *) alloca (noutputs * sizeof (tree));
1275 /* Record the contents of OUTPUTS before it is modified. */
1276 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1277 o[i] = TREE_VALUE (tail);
1279 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1280 OUTPUTS some trees for where the values were actually stored. */
1281 expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1282 ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1283 input_location);
1285 /* Copy all the intermediate outputs into the specified outputs. */
1286 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1288 if (o[i] != TREE_VALUE (tail))
1290 expand_assignment (o[i], TREE_VALUE (tail), 0);
1291 free_temp_slots ();
1293 /* Restore the original value so that it's correct the next
1294 time we expand this function. */
1295 TREE_VALUE (tail) = o[i];
1300 /* A subroutine of expand_asm_operands. Check that all operands have
1301 the same number of alternatives. Return true if so. */
1303 static bool
1304 check_operand_nalternatives (tree outputs, tree inputs)
1306 if (outputs || inputs)
1308 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1309 int nalternatives
1310 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1311 tree next = inputs;
1313 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1315 error ("too many alternatives in `asm'");
1316 return false;
1319 tmp = outputs;
1320 while (tmp)
1322 const char *constraint
1323 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1325 if (n_occurrences (',', constraint) != nalternatives)
1327 error ("operand constraints for `asm' differ in number of alternatives");
1328 return false;
1331 if (TREE_CHAIN (tmp))
1332 tmp = TREE_CHAIN (tmp);
1333 else
1334 tmp = next, next = 0;
1338 return true;
1341 /* A subroutine of expand_asm_operands. Check that all operand names
1342 are unique. Return true if so. We rely on the fact that these names
1343 are identifiers, and so have been canonicalized by get_identifier,
1344 so all we need are pointer comparisons. */
1346 static bool
1347 check_unique_operand_names (tree outputs, tree inputs)
1349 tree i, j;
1351 for (i = outputs; i ; i = TREE_CHAIN (i))
1353 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1354 if (! i_name)
1355 continue;
1357 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1358 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1359 goto failure;
1362 for (i = inputs; i ; i = TREE_CHAIN (i))
1364 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1365 if (! i_name)
1366 continue;
1368 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1369 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1370 goto failure;
1371 for (j = outputs; j ; j = TREE_CHAIN (j))
1372 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1373 goto failure;
1376 return true;
1378 failure:
1379 error ("duplicate asm operand name '%s'",
1380 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1381 return false;
1384 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1385 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1386 STRING and in the constraints to those numbers. */
1388 tree
1389 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1391 char *buffer;
1392 char *p;
1393 const char *c;
1394 tree t;
1396 check_unique_operand_names (outputs, inputs);
1398 /* Substitute [<name>] in input constraint strings. There should be no
1399 named operands in output constraints. */
1400 for (t = inputs; t ; t = TREE_CHAIN (t))
1402 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1403 if (strchr (c, '[') != NULL)
1405 p = buffer = xstrdup (c);
1406 while ((p = strchr (p, '[')) != NULL)
1407 p = resolve_operand_name_1 (p, outputs, inputs);
1408 TREE_VALUE (TREE_PURPOSE (t))
1409 = build_string (strlen (buffer), buffer);
1410 free (buffer);
1414 /* Now check for any needed substitutions in the template. */
1415 c = TREE_STRING_POINTER (string);
1416 while ((c = strchr (c, '%')) != NULL)
1418 if (c[1] == '[')
1419 break;
1420 else if (ISALPHA (c[1]) && c[2] == '[')
1421 break;
1422 else
1424 c += 1;
1425 continue;
1429 if (c)
1431 /* OK, we need to make a copy so we can perform the substitutions.
1432 Assume that we will not need extra space--we get to remove '['
1433 and ']', which means we cannot have a problem until we have more
1434 than 999 operands. */
1435 buffer = xstrdup (TREE_STRING_POINTER (string));
1436 p = buffer + (c - TREE_STRING_POINTER (string));
1438 while ((p = strchr (p, '%')) != NULL)
1440 if (p[1] == '[')
1441 p += 1;
1442 else if (ISALPHA (p[1]) && p[2] == '[')
1443 p += 2;
1444 else
1446 p += 1;
1447 continue;
1450 p = resolve_operand_name_1 (p, outputs, inputs);
1453 string = build_string (strlen (buffer), buffer);
1454 free (buffer);
1457 return string;
1460 /* A subroutine of resolve_operand_names. P points to the '[' for a
1461 potential named operand of the form [<name>]. In place, replace
1462 the name and brackets with a number. Return a pointer to the
1463 balance of the string after substitution. */
1465 static char *
1466 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
1468 char *q;
1469 int op;
1470 tree t;
1471 size_t len;
1473 /* Collect the operand name. */
1474 q = strchr (p, ']');
1475 if (!q)
1477 error ("missing close brace for named operand");
1478 return strchr (p, '\0');
1480 len = q - p - 1;
1482 /* Resolve the name to a number. */
1483 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1485 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1486 if (name)
1488 const char *c = TREE_STRING_POINTER (name);
1489 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1490 goto found;
1493 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1495 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1496 if (name)
1498 const char *c = TREE_STRING_POINTER (name);
1499 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1500 goto found;
1504 *q = '\0';
1505 error ("undefined named operand '%s'", p + 1);
1506 op = 0;
1507 found:
1509 /* Replace the name with the number. Unfortunately, not all libraries
1510 get the return value of sprintf correct, so search for the end of the
1511 generated string by hand. */
1512 sprintf (p, "%d", op);
1513 p = strchr (p, '\0');
1515 /* Verify the no extra buffer space assumption. */
1516 if (p > q)
1517 abort ();
1519 /* Shift the rest of the buffer down to fill the gap. */
1520 memmove (p, q + 1, strlen (q + 1) + 1);
1522 return p;
1525 /* Generate RTL to evaluate the expression EXP. */
1527 void
1528 expand_expr_stmt (tree exp)
1530 rtx value;
1531 tree type;
1533 value = expand_expr (exp, const0_rtx, VOIDmode, 0);
1534 type = TREE_TYPE (exp);
1536 /* If all we do is reference a volatile value in memory,
1537 copy it to a register to be sure it is actually touched. */
1538 if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
1540 if (TYPE_MODE (type) == VOIDmode)
1542 else if (TYPE_MODE (type) != BLKmode)
1543 value = copy_to_reg (value);
1544 else
1546 rtx lab = gen_label_rtx ();
1548 /* Compare the value with itself to reference it. */
1549 emit_cmp_and_jump_insns (value, value, EQ,
1550 expand_expr (TYPE_SIZE (type),
1551 NULL_RTX, VOIDmode, 0),
1552 BLKmode, 0, lab);
1553 emit_label (lab);
1557 /* Free any temporaries used to evaluate this expression. */
1558 free_temp_slots ();
1561 /* Warn if EXP contains any computations whose results are not used.
1562 Return 1 if a warning is printed; 0 otherwise. LOCUS is the
1563 (potential) location of the expression. */
1566 warn_if_unused_value (tree exp, location_t locus)
1568 restart:
1569 if (TREE_USED (exp))
1570 return 0;
1572 /* Don't warn about void constructs. This includes casting to void,
1573 void function calls, and statement expressions with a final cast
1574 to void. */
1575 if (VOID_TYPE_P (TREE_TYPE (exp)))
1576 return 0;
1578 if (EXPR_HAS_LOCATION (exp))
1579 locus = EXPR_LOCATION (exp);
1581 switch (TREE_CODE (exp))
1583 case PREINCREMENT_EXPR:
1584 case POSTINCREMENT_EXPR:
1585 case PREDECREMENT_EXPR:
1586 case POSTDECREMENT_EXPR:
1587 case MODIFY_EXPR:
1588 case INIT_EXPR:
1589 case TARGET_EXPR:
1590 case CALL_EXPR:
1591 case TRY_CATCH_EXPR:
1592 case WITH_CLEANUP_EXPR:
1593 case EXIT_EXPR:
1594 return 0;
1596 case BIND_EXPR:
1597 /* For a binding, warn if no side effect within it. */
1598 exp = BIND_EXPR_BODY (exp);
1599 goto restart;
1601 case SAVE_EXPR:
1602 exp = TREE_OPERAND (exp, 0);
1603 goto restart;
1605 case TRUTH_ORIF_EXPR:
1606 case TRUTH_ANDIF_EXPR:
1607 /* In && or ||, warn if 2nd operand has no side effect. */
1608 exp = TREE_OPERAND (exp, 1);
1609 goto restart;
1611 case COMPOUND_EXPR:
1612 if (TREE_NO_WARNING (exp))
1613 return 0;
1614 if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
1615 return 1;
1616 /* Let people do `(foo (), 0)' without a warning. */
1617 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1618 return 0;
1619 exp = TREE_OPERAND (exp, 1);
1620 goto restart;
1622 case NOP_EXPR:
1623 case CONVERT_EXPR:
1624 case NON_LVALUE_EXPR:
1625 /* Don't warn about conversions not explicit in the user's program. */
1626 if (TREE_NO_WARNING (exp))
1627 return 0;
1628 /* Assignment to a cast usually results in a cast of a modify.
1629 Don't complain about that. There can be an arbitrary number of
1630 casts before the modify, so we must loop until we find the first
1631 non-cast expression and then test to see if that is a modify. */
1633 tree tem = TREE_OPERAND (exp, 0);
1635 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1636 tem = TREE_OPERAND (tem, 0);
1638 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1639 || TREE_CODE (tem) == CALL_EXPR)
1640 return 0;
1642 goto maybe_warn;
1644 case INDIRECT_REF:
1645 /* Don't warn about automatic dereferencing of references, since
1646 the user cannot control it. */
1647 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1649 exp = TREE_OPERAND (exp, 0);
1650 goto restart;
1652 /* Fall through. */
1654 default:
1655 /* Referencing a volatile value is a side effect, so don't warn. */
1656 if ((DECL_P (exp)
1657 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1658 && TREE_THIS_VOLATILE (exp))
1659 return 0;
1661 /* If this is an expression which has no operands, there is no value
1662 to be unused. There are no such language-independent codes,
1663 but front ends may define such. */
1664 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
1665 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
1666 return 0;
1668 maybe_warn:
1669 /* If this is an expression with side effects, don't warn. */
1670 if (TREE_SIDE_EFFECTS (exp))
1671 return 0;
1673 warning ("%Hvalue computed is not used", &locus);
1674 return 1;
1678 /* Generate RTL for the start of an if-then. COND is the expression
1679 whose truth should be tested.
1681 If EXITFLAG is nonzero, this conditional is visible to
1682 `exit_something'. */
1684 void
1685 expand_start_cond (tree cond, int exitflag)
1687 struct nesting *thiscond = ALLOC_NESTING ();
1689 /* Make an entry on cond_stack for the cond we are entering. */
1691 thiscond->desc = COND_NESTING;
1692 thiscond->next = cond_stack;
1693 thiscond->all = nesting_stack;
1694 thiscond->depth = ++nesting_depth;
1695 thiscond->data.cond.next_label = gen_label_rtx ();
1696 /* Before we encounter an `else', we don't need a separate exit label
1697 unless there are supposed to be exit statements
1698 to exit this conditional. */
1699 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1700 thiscond->data.cond.endif_label = thiscond->exit_label;
1701 cond_stack = thiscond;
1702 nesting_stack = thiscond;
1704 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1707 /* Generate RTL between then-clause and the elseif-clause
1708 of an if-then-elseif-.... */
1710 void
1711 expand_start_elseif (tree cond)
1713 if (cond_stack->data.cond.endif_label == 0)
1714 cond_stack->data.cond.endif_label = gen_label_rtx ();
1715 emit_jump (cond_stack->data.cond.endif_label);
1716 emit_label (cond_stack->data.cond.next_label);
1717 cond_stack->data.cond.next_label = gen_label_rtx ();
1718 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1721 /* Generate RTL between the then-clause and the else-clause
1722 of an if-then-else. */
1724 void
1725 expand_start_else (void)
1727 if (cond_stack->data.cond.endif_label == 0)
1728 cond_stack->data.cond.endif_label = gen_label_rtx ();
1730 emit_jump (cond_stack->data.cond.endif_label);
1731 emit_label (cond_stack->data.cond.next_label);
1732 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1735 /* After calling expand_start_else, turn this "else" into an "else if"
1736 by providing another condition. */
1738 void
1739 expand_elseif (tree cond)
1741 cond_stack->data.cond.next_label = gen_label_rtx ();
1742 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1745 /* Generate RTL for the end of an if-then.
1746 Pop the record for it off of cond_stack. */
1748 void
1749 expand_end_cond (void)
1751 struct nesting *thiscond = cond_stack;
1753 do_pending_stack_adjust ();
1754 if (thiscond->data.cond.next_label)
1755 emit_label (thiscond->data.cond.next_label);
1756 if (thiscond->data.cond.endif_label)
1757 emit_label (thiscond->data.cond.endif_label);
1759 POPSTACK (cond_stack);
1762 /* Return nonzero if we should preserve sub-expressions as separate
1763 pseudos. We never do so if we aren't optimizing. We always do so
1764 if -fexpensive-optimizations. */
1767 preserve_subexpressions_p (void)
1769 if (flag_expensive_optimizations)
1770 return 1;
1772 if (optimize == 0 || cfun == 0 || cfun->stmt == 0)
1773 return 0;
1775 return 1;
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value but does not,
     clobber the return registers so they are not propagated live to
     the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}
1793 /* Generate RTL to return directly from the current function.
1794 (That is, we bypass any return value.) */
1796 void
1797 expand_naked_return (void)
1799 rtx end_label;
1801 clear_pending_stack_adjust ();
1802 do_pending_stack_adjust ();
1804 end_label = naked_return_label;
1805 if (end_label == 0)
1806 end_label = naked_return_label = gen_label_rtx ();
1808 emit_jump (end_label);
1811 /* Try to guess whether the value of return means error code. */
1812 static enum br_predictor
1813 return_prediction (rtx val)
1815 /* Different heuristics for pointers and scalars. */
1816 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
1818 /* NULL is usually not returned. */
1819 if (val == const0_rtx)
1820 return PRED_NULL_RETURN;
1822 else
1824 /* Negative return values are often used to indicate
1825 errors. */
1826 if (GET_CODE (val) == CONST_INT
1827 && INTVAL (val) < 0)
1828 return PRED_NEGATIVE_RETURN;
1829 /* Constant return values are also usually erors,
1830 zero/one often mean booleans so exclude them from the
1831 heuristics. */
1832 if (CONSTANT_P (val)
1833 && (val != const0_rtx && val != const1_rtx))
1834 return PRED_CONST_RETURN;
1836 return PRED_NO_PREDICTION;
1840 /* If the current function returns values in the most significant part
1841 of a register, shift return value VAL appropriately. The mode of
1842 the function's return type is known not to be BLKmode. */
1844 static rtx
1845 shift_return_value (rtx val)
1847 tree type;
1849 type = TREE_TYPE (DECL_RESULT (current_function_decl));
1850 if (targetm.calls.return_in_msb (type))
1852 rtx target;
1853 HOST_WIDE_INT shift;
1855 target = DECL_RTL (DECL_RESULT (current_function_decl));
1856 shift = (GET_MODE_BITSIZE (GET_MODE (target))
1857 - BITS_PER_UNIT * int_size_in_bytes (type));
1858 if (shift > 0)
1859 val = expand_shift (LSHIFT_EXPR, GET_MODE (target),
1860 gen_lowpart (GET_MODE (target), val),
1861 build_int_2 (shift, 0), target, 1);
1863 return val;
1867 /* Generate RTL to return from the current function, with value VAL. */
1869 static void
1870 expand_value_return (rtx val)
1872 rtx return_reg;
1873 enum br_predictor pred;
1875 if (flag_guess_branch_prob
1876 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
1878 /* Emit information for branch prediction. */
1879 rtx note;
1881 note = emit_note (NOTE_INSN_PREDICTION);
1883 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
1887 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
1889 /* Copy the value to the return location
1890 unless it's already there. */
1892 if (return_reg != val)
1894 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
1895 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
1897 int unsignedp = TYPE_UNSIGNED (type);
1898 enum machine_mode old_mode
1899 = DECL_MODE (DECL_RESULT (current_function_decl));
1900 enum machine_mode mode
1901 = promote_mode (type, old_mode, &unsignedp, 1);
1903 if (mode != old_mode)
1904 val = convert_modes (mode, old_mode, val, unsignedp);
1906 if (GET_CODE (return_reg) == PARALLEL)
1907 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1908 else
1909 emit_move_insn (return_reg, val);
1912 expand_null_return_1 ();
1915 /* Output a return with no value. */
1917 static void
1918 expand_null_return_1 (void)
1920 rtx end_label;
1922 clear_pending_stack_adjust ();
1923 do_pending_stack_adjust ();
1925 end_label = return_label;
1926 if (end_label == 0)
1927 end_label = return_label = gen_label_rtx ();
1928 emit_jump (end_label);
1931 /* Generate RTL to evaluate the expression RETVAL and return it
1932 from the current function. */
1934 void
1935 expand_return (tree retval)
1937 rtx result_rtl;
1938 rtx val = 0;
1939 tree retval_rhs;
1941 /* If function wants no value, give it none. */
1942 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
1944 expand_expr (retval, NULL_RTX, VOIDmode, 0);
1945 expand_null_return ();
1946 return;
1949 if (retval == error_mark_node)
1951 /* Treat this like a return of no value from a function that
1952 returns a value. */
1953 expand_null_return ();
1954 return;
1956 else if (TREE_CODE (retval) == RESULT_DECL)
1957 retval_rhs = retval;
1958 else if ((TREE_CODE (retval) == MODIFY_EXPR
1959 || TREE_CODE (retval) == INIT_EXPR)
1960 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
1961 retval_rhs = TREE_OPERAND (retval, 1);
1962 else
1963 retval_rhs = retval;
1965 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
1967 /* If the result is an aggregate that is being returned in one (or more)
1968 registers, load the registers here. The compiler currently can't handle
1969 copying a BLKmode value into registers. We could put this code in a
1970 more general area (for use by everyone instead of just function
1971 call/return), but until this feature is generally usable it is kept here
1972 (and in expand_call). */
1974 if (retval_rhs != 0
1975 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
1976 && REG_P (result_rtl))
1978 int i;
1979 unsigned HOST_WIDE_INT bitpos, xbitpos;
1980 unsigned HOST_WIDE_INT padding_correction = 0;
1981 unsigned HOST_WIDE_INT bytes
1982 = int_size_in_bytes (TREE_TYPE (retval_rhs));
1983 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1984 unsigned int bitsize
1985 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
1986 rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
1987 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
1988 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
1989 enum machine_mode tmpmode, result_reg_mode;
1991 if (bytes == 0)
1993 expand_null_return ();
1994 return;
1997 /* If the structure doesn't take up a whole number of words, see
1998 whether the register value should be padded on the left or on
1999 the right. Set PADDING_CORRECTION to the number of padding
2000 bits needed on the left side.
2002 In most ABIs, the structure will be returned at the least end of
2003 the register, which translates to right padding on little-endian
2004 targets and left padding on big-endian targets. The opposite
2005 holds if the structure is returned at the most significant
2006 end of the register. */
2007 if (bytes % UNITS_PER_WORD != 0
2008 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
2009 ? !BYTES_BIG_ENDIAN
2010 : BYTES_BIG_ENDIAN))
2011 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2012 * BITS_PER_UNIT));
2014 /* Copy the structure BITSIZE bits at a time. */
2015 for (bitpos = 0, xbitpos = padding_correction;
2016 bitpos < bytes * BITS_PER_UNIT;
2017 bitpos += bitsize, xbitpos += bitsize)
2019 /* We need a new destination pseudo each time xbitpos is
2020 on a word boundary and when xbitpos == padding_correction
2021 (the first time through). */
2022 if (xbitpos % BITS_PER_WORD == 0
2023 || xbitpos == padding_correction)
2025 /* Generate an appropriate register. */
2026 dst = gen_reg_rtx (word_mode);
2027 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2029 /* Clear the destination before we move anything into it. */
2030 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
2033 /* We need a new source operand each time bitpos is on a word
2034 boundary. */
2035 if (bitpos % BITS_PER_WORD == 0)
2036 src = operand_subword_force (result_val,
2037 bitpos / BITS_PER_WORD,
2038 BLKmode);
2040 /* Use bitpos for the source extraction (left justified) and
2041 xbitpos for the destination store (right justified). */
2042 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2043 extract_bit_field (src, bitsize,
2044 bitpos % BITS_PER_WORD, 1,
2045 NULL_RTX, word_mode, word_mode));
2048 tmpmode = GET_MODE (result_rtl);
2049 if (tmpmode == BLKmode)
2051 /* Find the smallest integer mode large enough to hold the
2052 entire structure and use that mode instead of BLKmode
2053 on the USE insn for the return register. */
2054 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2055 tmpmode != VOIDmode;
2056 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2057 /* Have we found a large enough mode? */
2058 if (GET_MODE_SIZE (tmpmode) >= bytes)
2059 break;
2061 /* No suitable mode found. */
2062 if (tmpmode == VOIDmode)
2063 abort ();
2065 PUT_MODE (result_rtl, tmpmode);
2068 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2069 result_reg_mode = word_mode;
2070 else
2071 result_reg_mode = tmpmode;
2072 result_reg = gen_reg_rtx (result_reg_mode);
2074 for (i = 0; i < n_regs; i++)
2075 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2076 result_pseudos[i]);
2078 if (tmpmode != result_reg_mode)
2079 result_reg = gen_lowpart (tmpmode, result_reg);
2081 expand_value_return (result_reg);
2083 else if (retval_rhs != 0
2084 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
2085 && (REG_P (result_rtl)
2086 || (GET_CODE (result_rtl) == PARALLEL)))
2088 /* Calculate the return value into a temporary (usually a pseudo
2089 reg). */
2090 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
2091 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
2093 val = assign_temp (nt, 0, 0, 1);
2094 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2095 val = force_not_mem (val);
2096 /* Return the calculated value. */
2097 expand_value_return (shift_return_value (val));
2099 else
2101 /* No hard reg used; calculate value into hard return reg. */
2102 expand_expr (retval, const0_rtx, VOIDmode, 0);
2103 expand_value_return (result_rtl);
/* Generate the RTL code for entering a binding contour.
   The variables are declared one by one, by calls to `expand_decl'.

   FLAGS is a bitwise or of the following flags:

     1 - Nonzero if this construct should be visible to
         `exit_something'.

     2 - Nonzero if this contour does not require a
         NOTE_INSN_BLOCK_BEG note.  Virtually all calls from
         language-independent code should set this flag because they
         will not create corresponding BLOCK nodes.  (There should be
         a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
         and BLOCKs.)  If this flag is set, MARK_ENDS should be zero
         when expand_end_bindings is called.

    If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
    optionally be supplied.  If so, it becomes the NOTE_BLOCK for the
    note.  */

void
expand_start_bindings_and_block (int flags, tree block)
{
  struct nesting *thisblock = ALLOC_NESTING ();
  rtx note;
  int exit_flag = ((flags & 1) != 0);
  int block_flag = ((flags & 2) == 0);

  /* If a BLOCK is supplied, then the caller should be requesting a
     NOTE_INSN_BLOCK_BEG note.  */
  if (!block_flag && block)
    abort ();

  /* Create a note to mark the beginning of the block.  */
  note = emit_note (NOTE_INSN_DELETED);

  /* Make an entry on block_stack for the block we are entering.
     This links the new contour onto both the block stack proper and
     the unified nesting_stack.  */

  thisblock->desc = BLOCK_NESTING;
  thisblock->next = block_stack;
  thisblock->all = nesting_stack;
  thisblock->depth = ++nesting_depth;
  thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;

  /* When we insert instructions after the last unconditional cleanup,
     we don't adjust last_insn.  That means that a later add_insn will
     clobber the instructions we've just added.  The easiest way to
     fix this is to just insert another instruction here, so that the
     instructions inserted after the last unconditional cleanup are
     never the last instruction.  */
  emit_note (NOTE_INSN_DELETED);

  thisblock->data.block.first_insn = note;
  thisblock->data.block.block_start_count = ++current_block_start_count;
  /* Only an exit-visible contour (flag bit 1) gets an exit label.  */
  thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
  block_stack = thisblock;
  nesting_stack = thisblock;

  /* Make a new level for allocating stack slots.  */
  push_temp_slots ();
}
/* Specify the scope of temporaries created by TARGET_EXPRs.  Similar
   to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
   expand_expr are made.  After we end the region, we know that all
   space for all temporaries that were created by TARGET_EXPRs will be
   destroyed and their space freed for reuse.  Must be paired with a
   later call to expand_end_target_temps.  */

void
expand_start_target_temps (void)
{
  /* This is so that even if the result is preserved, the space
     allocated will be freed, as we know that it is no longer in use.  */
  push_temp_slots ();

  /* Start a new binding layer that will keep track of all cleanup
     actions to be performed.  Flag 2: no NOTE_INSN_BLOCK_BEG needed.  */
  expand_start_bindings (2);

  /* Record the current temp-slot level as the scope for TARGET_EXPR
     temporaries.  */
  target_temp_slot_level = temp_slot_level;
}
/* Close the region opened by expand_start_target_temps: pop the
   binding contour and the temp-slot level pushed there.  */

void
expand_end_target_temps (void)
{
  expand_end_bindings (NULL_TREE, 0, 0);

  /* This is so that even if the result is preserved, the space
     allocated will be freed, as we know that it is no longer in use.  */
  pop_temp_slots ();
}
/* Given a pointer to a BLOCK node return nonzero if (and only if) the node
   in question represents the outermost pair of curly braces (i.e. the "body
   block") of a function or method.

   For any BLOCK node representing a "body block" of a function or method, the
   BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
   represents the outermost (function) scope for the function or method (i.e.
   the one which includes the formal parameters).  The BLOCK_SUPERCONTEXT of
   *that* node in turn will point to the relevant FUNCTION_DECL node.

   Returns 0 unconditionally when the language hook says body blocks do
   not exist for this frontend.  */

int
is_body_block (tree stmt)
{
  if (lang_hooks.no_body_blocks)
    return 0;

  if (TREE_CODE (stmt) == BLOCK)
    {
      tree parent = BLOCK_SUPERCONTEXT (stmt);

      if (parent && TREE_CODE (parent) == BLOCK)
	{
	  tree grandparent = BLOCK_SUPERCONTEXT (parent);

	  /* BLOCK -> BLOCK -> FUNCTION_DECL identifies the body block.  */
	  if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
	    return 1;
	}
    }

  return 0;
}
/* Return an opaque pointer to the current nesting level, so frontend code
   can check its own sanity.  Returns 0 when no function is currently
   being compiled (cfun is null).  */

struct nesting *
current_nesting_level (void)
{
  return cfun ? block_stack : 0;
}
/* Emit code to restore vital registers at the beginning of a nonlocal goto
   handler.  The frame pointer (and, where it is a distinct fixed register,
   the argument pointer) must be recovered before any of the function's
   stacked variables can be referenced.  */
static void
expand_nl_goto_receiver (void)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* If the target has a nonlocal_goto pattern, that pattern is
     responsible for the frame pointer restore; otherwise do it here.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments are to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be
       decrementing fp by STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
      size_t i;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (get_arg_pointer_save_area (cfun)));
	}
    }
#endif

  /* Let the target emit any extra receiver setup it needs.  */
#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
#endif

  /* @@@ This is a kludge.  Not all machine descriptions define a blockage
     insn, but we must not allow the code we just generated to be reordered
     by scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  So emit an ASM_INPUT to act as blockage
     insn.  */
  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
}
2310 /* Warn about any unused VARS (which may contain nodes other than
2311 VAR_DECLs, but such nodes are ignored). The nodes are connected
2312 via the TREE_CHAIN field. */
2314 void
2315 warn_about_unused_variables (tree vars)
2317 tree decl;
2319 if (warn_unused_variable)
2320 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2321 if (TREE_CODE (decl) == VAR_DECL
2322 && ! TREE_USED (decl)
2323 && ! DECL_IN_SYSTEM_HEADER (decl)
2324 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
2325 warning ("%Junused variable '%D'", decl, decl);
/* Generate RTL code to terminate a binding contour.

   VARS is the chain of VAR_DECL nodes for the variables bound in this
   contour.  There may actually be other nodes in this chain, but any
   nodes other than VAR_DECLS are ignored.

   MARK_ENDS is nonzero if we should put a note at the beginning
   and end of this binding contour.

   DONT_JUMP_IN is positive if it is not valid to jump into this contour,
   zero if we can jump into this contour only if it does not have a saved
   stack level, and negative if we are not to check for invalid use of
   labels (because the front end does that).

   NOTE(review): MARK_ENDS and DONT_JUMP_IN are currently unused here
   (ATTRIBUTE_UNUSED); the parameters survive for interface
   compatibility with the frontends.  */

void
expand_end_bindings (tree vars, int mark_ends ATTRIBUTE_UNUSED,
		     int dont_jump_in ATTRIBUTE_UNUSED)
{
  struct nesting *thisblock = block_stack;

  /* If any of the variables in this scope were not used, warn the
     user.  */
  warn_about_unused_variables (vars);

  /* If the contour is exit-visible, resolve its exit label here.  */
  if (thisblock->exit_label)
    {
      do_pending_stack_adjust ();
      emit_label (thisblock->exit_label);
    }

  /* Mark the beginning and end of the scope if requested.  */

  /* Get rid of the beginning-mark if we don't make an end-mark.  */
  NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;

  /* Restore the temporary level of TARGET_EXPRs.  */
  target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;

  /* Restore block_stack level for containing block.  */

  POPSTACK (block_stack);

  /* Pop the stack slot nesting and free any slots at this level.  */
  pop_temp_slots ();
}
/* Generate RTL for the automatic variable declaration DECL.
   (Other kinds of declarations are simply ignored if seen here.)

   Chooses one of four representations for the variable:
     - a dummy (MEM (const0_rtx)) for erroneous or incomplete types,
     - a pseudo register when use_register_for_decl says so,
     - a fixed stack slot for constant-size variables,
     - dynamically allocated stack space otherwise.  */

void
expand_decl (tree decl)
{
  tree type;

  type = TREE_TYPE (decl);

  /* For a CONST_DECL, set mode, alignment, and sizes from those of the
     type in case this node is used in a reference.  */
  if (TREE_CODE (decl) == CONST_DECL)
    {
      DECL_MODE (decl) = TYPE_MODE (type);
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
      return;
    }

  /* Otherwise, only automatic variables need any expansion done.  Static and
     external variables, and external functions, will be handled by
     `assemble_variable' (called from finish_decl).  TYPE_DECL requires
     nothing.  PARM_DECLs are handled in `assign_parms'.  */
  if (TREE_CODE (decl) != VAR_DECL)
    return;

  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
    return;

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));

  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */
    {
      rtx x;
      if (DECL_INITIAL (decl) == 0)
	/* Error message was already done; now avoid a crash.  */
	x = gen_rtx_MEM (BLKmode, const0_rtx);
      else
	/* An initializer is going to decide the size of this array.
	   Until we know the size, represent its address with a reg.  */
	x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));

      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);
    }
  else if (use_register_for_decl (decl))
    {
      /* Automatic variable that can go in a register.  */
      int unsignedp = TYPE_UNSIGNED (type);
      enum machine_mode reg_mode
	= promote_mode (type, DECL_MODE (decl), &unsignedp, 0);

      SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));

      /* Note if the object is a user variable.  */
      if (!DECL_ARTIFICIAL (decl))
	{
	  mark_user_reg (DECL_RTL (decl));

	  /* Trust user variables which have a pointer type to really
	     be pointers.  Do not trust compiler generated temporaries
	     as our type system is totally busted as it relates to
	     pointer arithmetic which translates into lots of compiler
	     generated objects with pointer types, but which are not really
	     pointers.  */
	  if (POINTER_TYPE_P (type))
	    mark_reg_pointer (DECL_RTL (decl),
			      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
	}

      maybe_set_unchanging (DECL_RTL (decl), decl);
    }

  else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
	   && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
		 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
					  STACK_CHECK_MAX_VAR_SIZE)))
    {
      /* Variable of fixed size that goes on the stack.  */
      rtx oldaddr = 0;
      rtx addr;
      rtx x;

      /* If we previously made RTL for this decl, it must be an array
	 whose size was determined by the initializer.
	 The old address was a register; set that register now
	 to the proper address.  */
      if (DECL_RTL_SET_P (decl))
	{
	  if (!MEM_P (DECL_RTL (decl))
	      || !REG_P (XEXP (DECL_RTL (decl), 0)))
	    abort ();
	  oldaddr = XEXP (DECL_RTL (decl), 0);
	}

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
			   : GET_MODE_BITSIZE (DECL_MODE (decl)));
      DECL_USER_ALIGN (decl) = 0;

      x = assign_temp (decl, 1, 1, 1);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);

      if (oldaddr)
	{
	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
	  if (addr != oldaddr)
	    emit_move_insn (oldaddr, addr);
	}
    }
  else
    /* Dynamic-size object: must push space on the stack.  */
    {
      rtx address, size, x;

      /* Record the stack pointer on entry to block, if have
	 not already done so.  */
      do_pending_stack_adjust ();

      /* Compute the variable's size, in bytes.  This will expand any
	 needed SAVE_EXPRs for the first time.  */
      size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
      free_temp_slots ();

      /* Allocate space on the stack for the variable.  Note that
	 DECL_ALIGN says how the variable is to be aligned and we
	 cannot use it to conclude anything about the alignment of
	 the size.  */
      address = allocate_dynamic_stack_space (size, NULL_RTX,
					      TYPE_ALIGN (TREE_TYPE (decl)));

      /* Reference the variable indirect through that rtx.  */
      x = gen_rtx_MEM (DECL_MODE (decl), address);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);

      /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
      DECL_ALIGN (decl) = STACK_BOUNDARY;
#else
      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
#endif
      DECL_USER_ALIGN (decl) = 0;
    }
}
/* Emit code to allocate T_SIZE bytes of dynamic stack space for ALLOC.
   ALLOC must be an ADDR_EXPR whose operand is a VAR_DECL; the VAR_DECL's
   DECL_RTL must already be a MEM whose address receives the allocation.  */
void
expand_stack_alloc (tree alloc, tree t_size)
{
  rtx address, dest, size;
  tree var, type;

  if (TREE_CODE (alloc) != ADDR_EXPR)
    abort ();
  var = TREE_OPERAND (alloc, 0);
  if (TREE_CODE (var) != VAR_DECL)
    abort ();

  type = TREE_TYPE (var);

  /* Compute the variable's size, in bytes.  */
  size = expand_expr (t_size, NULL_RTX, VOIDmode, 0);
  free_temp_slots ();

  /* Allocate space on the stack for the variable, reusing the
     variable's address register as the target where possible.  */
  address = XEXP (DECL_RTL (var), 0);
  dest = allocate_dynamic_stack_space (size, address, TYPE_ALIGN (type));
  if (dest != address)
    emit_move_insn (address, dest);

  /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
  DECL_ALIGN (var) = STACK_BOUNDARY;
#else
  DECL_ALIGN (var) = BIGGEST_ALIGNMENT;
#endif
  DECL_USER_ALIGN (var) = 0;
}
/* Emit code to save the current value of stack.  Returns the rtx
   (typically a pseudo) holding the saved stack pointer, suitable for a
   later expand_stack_restore.  */
rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  do_pending_stack_adjust ();
  emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
  return ret;
}
/* Emit code to restore the current value of stack.  VAR is the
   variable whose DECL_RTL holds the value produced by a prior
   expand_stack_save.  */
void
expand_stack_restore (tree var)
{
  rtx sa = DECL_RTL (var);

  emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
}
/* Emit code to perform the initialization of a declaration DECL.
   Erroneous initializers of scalar types are replaced by zero; a
   valid DECL_INITIAL is expanded as an assignment.  The variable's
   TREE_USED flag is preserved across the expansion.  */

void
expand_decl_init (tree decl)
{
  int was_used = TREE_USED (decl);

  /* If this is a CONST_DECL, we don't have to generate any code.  Likewise
     for static decls.  */
  if (TREE_CODE (decl) == CONST_DECL
      || TREE_STATIC (decl))
    return;

  /* Compute and store the initial value now.  */

  push_temp_slots ();

  if (DECL_INITIAL (decl) == error_mark_node)
    {
      enum tree_code code = TREE_CODE (TREE_TYPE (decl));

      /* Recover from an erroneous initializer by zero-filling scalar
	 types; aggregate types are left uninitialized.  */
      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
	  || code == POINTER_TYPE || code == REFERENCE_TYPE)
	expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
			   0);
    }
  else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
    {
      emit_line_note (DECL_SOURCE_LOCATION (decl));
      expand_assignment (decl, DECL_INITIAL (decl), 0);
    }

  /* Don't let the initialization count as "using" the variable.  */
  TREE_USED (decl) = was_used;

  /* Free any temporaries we made while initializing the decl.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();
}
/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
   DECL_ELTS is the list of elements that belong to DECL's type.
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.

   The union itself is expanded as a normal decl, then each member is
   given RTL that aliases the union's storage (a re-moded MEM or a
   SUBREG of the union's register).

   NOTE(review): CLEANUP is currently unused (ATTRIBUTE_UNUSED).  */

void
expand_anon_union_decl (tree decl, tree cleanup ATTRIBUTE_UNUSED,
			tree decl_elts)
{
  rtx x;
  tree t;

  /* If any of the elements are addressable, so is the entire union.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_VALUE (t)))
      {
	TREE_ADDRESSABLE (decl) = 1;
	break;
      }

  expand_decl (decl);
  x = DECL_RTL (decl);

  /* Go through the elements, assigning RTL to each.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    {
      tree decl_elt = TREE_VALUE (t);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));

      /* If any of the elements are used, so is the entire union.  */
      if (TREE_USED (decl_elt))
	TREE_USED (decl) = 1;

      /* Propagate the union's alignment to the elements.  */
      DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
      DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);

      /* If the element has BLKmode and the union doesn't, the union is
         aligned such that the element doesn't need to have BLKmode, so
         change the element's mode to the appropriate one for its size.  */
      if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
	DECL_MODE (decl_elt) = mode
	  = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);

      /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
         instead create a new MEM rtx with the proper mode.  */
      if (MEM_P (x))
	{
	  if (mode == GET_MODE (x))
	    SET_DECL_RTL (decl_elt, x);
	  else
	    SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
	}
      else if (REG_P (x))
	{
	  if (mode == GET_MODE (x))
	    SET_DECL_RTL (decl_elt, x);
	  else
	    SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
	}
      else
	abort ();
    }
}
/* Enter a case (Pascal) or switch (C) statement.
   Push a block onto case_stack and nesting_stack
   to accumulate the case-labels that are seen
   and to record the labels generated for the statement.

   EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
   Otherwise, this construct is transparent for `exit_something'.

   EXPR is the index-expression to be dispatched on.
   TYPE is its nominal type.  We could simply convert EXPR to this type,
   but instead we take short cuts.  */

void
expand_start_case (tree index_expr)
{
  struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the case we are entering.  */

  thiscase->desc = CASE_NESTING;
  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.index_expr = index_expr;
  thiscase->data.case_stmt.default_label = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;

  do_pending_stack_adjust ();

  /* Make sure case_stmt.start points to something that won't
     need any transformation before expand_end_case.  */
  if (!NOTE_P (get_last_insn ()))
    emit_note (NOTE_INSN_DELETED);

  thiscase->data.case_stmt.start = get_last_insn ();
}
/* Do the insertion of a case label into
   case_stack->data.case_stmt.case_list.  Use an AVL tree to avoid
   slowdown for large switch statements.

   LOW and HIGH are the (INTEGER_CST) bounds of the case range; a null
   HIGH denotes a single-value case, and both null denotes the default
   label.  LABEL is the LABEL_DECL to branch to.

   Returns 0 on success.  Returns 2 on an overlap with an existing case
   (or a second default label), storing the conflicting label in
   *DUPLICATE.  */

int
add_case_node (tree low, tree high, tree label, tree *duplicate)
{
  struct case_node *p, **q, *r;

  /* If there's no HIGH value, then this is not a case range; it's
     just a simple case label.  But that's just a degenerate case
     range.  */
  if (!high)
    high = low;

  /* Handle default labels specially.  */
  if (!high && !low)
    {
      if (case_stack->data.case_stmt.default_label != 0)
	{
	  *duplicate = case_stack->data.case_stmt.default_label;
	  return 2;
	}
      case_stack->data.case_stmt.default_label = label;
      return 0;
    }

  /* Standard binary-search-tree descent; Q tracks the link to patch,
     P the would-be parent.  */
  q = &case_stack->data.case_stmt.case_list;
  p = *q;

  while ((r = *q))
    {
      p = r;

      /* Keep going past elements distinctly greater than HIGH.  */
      if (tree_int_cst_lt (high, p->low))
	q = &p->left;

      /* or distinctly less than LOW.  */
      else if (tree_int_cst_lt (p->high, low))
	q = &p->right;

      else
	{
	  /* We have an overlap; this is an error.  */
	  *duplicate = p->code_label;
	  return 2;
	}
    }

  /* Add this label to the chain, and succeed.  */

  r = ggc_alloc (sizeof (struct case_node));
  r->low = low;

  /* If the bounds are equal, turn this into the one-value case.  */
  if (tree_int_cst_equal (low, high))
    r->high = r->low;
  else
    r->high = high;

  r->code_label = label;

  *q = r;
  r->parent = p;
  r->left = 0;
  r->right = 0;
  r->balance = 0;

  /* Rebalance the AVL tree, walking from the new leaf back up to the
     root and applying single/double rotations as needed.  */
  while (p)
    {
      struct case_node *s;

      if (r == p->left)
	{
	  int b;

	  if (! (b = p->balance))
	    /* Growth propagation from left side.  */
	    p->balance = -1;
	  else if (b < 0)
	    {
	      if (r->balance < 0)
		{
		  /* R-Rotation */
		  if ((p->left = s = r->right))
		    s->parent = p;

		  r->right = p;
		  p->balance = 0;
		  r->balance = 0;
		  s = p->parent;
		  p->parent = r;

		  if ((r->parent = s))
		    {
		      if (s->left == p)
			s->left = r;
		      else
			s->right = r;
		    }
		  else
		    case_stack->data.case_stmt.case_list = r;
		}
	      else
		/* r->balance == +1 */
		{
		  /* LR-Rotation */

		  int b2;
		  struct case_node *t = r->right;

		  if ((p->left = s = t->right))
		    s->parent = p;

		  t->right = p;
		  if ((r->right = s = t->left))
		    s->parent = r;

		  t->left = r;
		  b = t->balance;
		  b2 = b < 0;
		  p->balance = b2;
		  b2 = -b2 - b;
		  r->balance = b2;
		  t->balance = 0;
		  s = p->parent;
		  p->parent = t;
		  r->parent = t;

		  if ((t->parent = s))
		    {
		      if (s->left == p)
			s->left = t;
		      else
			s->right = t;
		    }
		  else
		    case_stack->data.case_stmt.case_list = t;
		}
	      break;
	    }
	  else
	    {
	      /* p->balance == +1; growth of left side balances the node.  */
	      p->balance = 0;
	      break;
	    }
	}
      else
	/* r == p->right */
	{
	  int b;

	  if (! (b = p->balance))
	    /* Growth propagation from right side.  */
	    p->balance++;
	  else if (b > 0)
	    {
	      if (r->balance > 0)
		{
		  /* L-Rotation */

		  if ((p->right = s = r->left))
		    s->parent = p;

		  r->left = p;
		  p->balance = 0;
		  r->balance = 0;
		  s = p->parent;
		  p->parent = r;
		  if ((r->parent = s))
		    {
		      if (s->left == p)
			s->left = r;
		      else
			s->right = r;
		    }

		  else
		    case_stack->data.case_stmt.case_list = r;
		}

	      else
		/* r->balance == -1 */
		{
		  /* RL-Rotation */
		  int b2;
		  struct case_node *t = r->left;

		  if ((p->right = s = t->left))
		    s->parent = p;

		  t->left = p;

		  if ((r->left = s = t->right))
		    s->parent = r;

		  t->right = r;
		  b = t->balance;
		  b2 = b < 0;
		  r->balance = b2;
		  b2 = -b2 - b;
		  p->balance = b2;
		  t->balance = 0;
		  s = p->parent;
		  p->parent = t;
		  r->parent = t;

		  if ((t->parent = s))
		    {
		      if (s->left == p)
			s->left = t;
		      else
			s->right = t;
		    }
		  else
		    case_stack->data.case_stmt.case_list = t;
		}
	      break;
	    }
	  else
	    {
	      /* p->balance == -1; growth of right side balances the node.  */
	      p->balance = 0;
	      break;
	    }
	}

      r = p;
      p = p->parent;
    }

  return 0;
}
/* Maximum number of case bit tests.  */
#define MAX_CASE_BIT_TESTS  3

/* By default, enable case bit tests on targets with ashlsi3.
   Targets may override this by defining CASE_USE_BIT_TESTS.  */
#ifndef CASE_USE_BIT_TESTS
#define CASE_USE_BIT_TESTS  (ashl_optab->handlers[word_mode].insn_code \
			     != CODE_FOR_nothing)
#endif
/* A case_bit_test represents a set of case nodes that may be
   selected from using a bit-wise comparison.  HI and LO hold
   the integer to be tested against, LABEL contains the label
   to jump to upon success and BITS counts the number of case
   nodes handled by this test, typically the number of bits
   set in HI:LO.  HI holds the bits at positions
   HOST_BITS_PER_WIDE_INT and above; LO the low word.  */

struct case_bit_test
{
  HOST_WIDE_INT hi;
  HOST_WIDE_INT lo;
  rtx label;
  int bits;
};
2992 /* Determine whether "1 << x" is relatively cheap in word_mode. */
2994 static
2995 bool lshift_cheap_p (void)
2997 static bool init = false;
2998 static bool cheap = true;
3000 if (!init)
3002 rtx reg = gen_rtx_REG (word_mode, 10000);
3003 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
3004 cheap = cost < COSTS_N_INSNS (3);
3005 init = true;
3008 return cheap;
3011 /* Comparison function for qsort to order bit tests by decreasing
3012 number of case nodes, i.e. the node with the most cases gets
3013 tested first. */
3015 static int
3016 case_bit_test_cmp (const void *p1, const void *p2)
3018 const struct case_bit_test *d1 = p1;
3019 const struct case_bit_test *d2 = p2;
3021 return d2->bits - d1->bits;
3024 /* Expand a switch statement by a short sequence of bit-wise
3025 comparisons. "switch(x)" is effectively converted into
3026 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
3027 integer constants.
3029 INDEX_EXPR is the value being switched on, which is of
3030 type INDEX_TYPE. MINVAL is the lowest case value of in
3031 the case nodes, of INDEX_TYPE type, and RANGE is highest
3032 value minus MINVAL, also of type INDEX_TYPE. NODES is
3033 the set of case nodes, and DEFAULT_LABEL is the label to
3034 branch to should none of the cases match.
3036 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
3037 node targets. */
3039 static void
3040 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
3041 tree range, case_node_ptr nodes, rtx default_label)
3043 struct case_bit_test test[MAX_CASE_BIT_TESTS];
3044 enum machine_mode mode;
3045 rtx expr, index, label;
3046 unsigned int i,j,lo,hi;
3047 struct case_node *n;
3048 unsigned int count;
3050 count = 0;
3051 for (n = nodes; n; n = n->right)
3053 label = label_rtx (n->code_label);
3054 for (i = 0; i < count; i++)
3055 if (same_case_target_p (label, test[i].label))
3056 break;
3058 if (i == count)
3060 if (count >= MAX_CASE_BIT_TESTS)
3061 abort ();
3062 test[i].hi = 0;
3063 test[i].lo = 0;
3064 test[i].label = label;
3065 test[i].bits = 1;
3066 count++;
3068 else
3069 test[i].bits++;
3071 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3072 n->low, minval)), 1);
3073 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3074 n->high, minval)), 1);
3075 for (j = lo; j <= hi; j++)
3076 if (j >= HOST_BITS_PER_WIDE_INT)
3077 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
3078 else
3079 test[i].lo |= (HOST_WIDE_INT) 1 << j;
3082 qsort (test, count, sizeof(*test), case_bit_test_cmp);
3084 index_expr = fold (build (MINUS_EXPR, index_type,
3085 convert (index_type, index_expr),
3086 convert (index_type, minval)));
3087 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3088 do_pending_stack_adjust ();
3090 mode = TYPE_MODE (index_type);
3091 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
3092 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
3093 default_label);
3095 index = convert_to_mode (word_mode, index, 0);
3096 index = expand_binop (word_mode, ashl_optab, const1_rtx,
3097 index, NULL_RTX, 1, OPTAB_WIDEN);
3099 for (i = 0; i < count; i++)
3101 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
3102 expr = expand_binop (word_mode, and_optab, index, expr,
3103 NULL_RTX, 1, OPTAB_WIDEN);
3104 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
3105 word_mode, 1, test[i].label);
3108 emit_jump (default_label);
/* Provide zero fallbacks so the uses below compile on targets whose
   insn-flags.h does not define these patterns.  */
#ifndef HAVE_casesi
#define HAVE_casesi 0
#endif

#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#endif
3119 /* Terminate a case (Pascal) or switch (C) statement
3120 in which ORIG_INDEX is the expression to be tested.
3121 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
3122 type as given in the source before any compiler conversions.
3123 Generate the code to test it and jump to the right place. */
3125 void
3126 expand_end_case_type (tree orig_index, tree orig_type)
3128 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
3129 rtx default_label = 0;
3130 struct case_node *n, *m;
3131 unsigned int count, uniq;
3132 rtx index;
3133 rtx table_label;
3134 int ncases;
3135 rtx *labelvec;
3136 int i;
3137 rtx before_case, end, lab;
3138 struct nesting *thiscase = case_stack;
3139 tree index_expr, index_type;
3140 bool exit_done = false;
3141 int unsignedp;
3143 /* Don't crash due to previous errors. */
3144 if (thiscase == NULL)
3145 return;
3147 index_expr = thiscase->data.case_stmt.index_expr;
3148 index_type = TREE_TYPE (index_expr);
3149 unsignedp = TYPE_UNSIGNED (index_type);
3150 if (orig_type == NULL)
3151 orig_type = TREE_TYPE (orig_index);
3153 do_pending_stack_adjust ();
3155 /* An ERROR_MARK occurs for various reasons including invalid data type. */
3156 if (index_type != error_mark_node)
3158 /* If we don't have a default-label, create one here,
3159 after the body of the switch. */
3160 if (thiscase->data.case_stmt.default_label == 0)
3162 thiscase->data.case_stmt.default_label
3163 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3164 /* Share the exit label if possible. */
3165 if (thiscase->exit_label)
3167 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
3168 thiscase->exit_label);
3169 exit_done = true;
3171 expand_label (thiscase->data.case_stmt.default_label);
3173 default_label = label_rtx (thiscase->data.case_stmt.default_label);
3175 before_case = get_last_insn ();
3177 if (thiscase->data.case_stmt.case_list
3178 && thiscase->data.case_stmt.case_list->left)
3179 thiscase->data.case_stmt.case_list
3180 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
3182 /* Get upper and lower bounds of case values.
3183 Also convert all the case values to the index expr's data type. */
3185 uniq = 0;
3186 count = 0;
3187 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3189 /* Check low and high label values are integers. */
3190 if (TREE_CODE (n->low) != INTEGER_CST)
3191 abort ();
3192 if (TREE_CODE (n->high) != INTEGER_CST)
3193 abort ();
3195 n->low = convert (index_type, n->low);
3196 n->high = convert (index_type, n->high);
3198 /* Count the elements and track the largest and smallest
3199 of them (treating them as signed even if they are not). */
3200 if (count++ == 0)
3202 minval = n->low;
3203 maxval = n->high;
3205 else
3207 if (INT_CST_LT (n->low, minval))
3208 minval = n->low;
3209 if (INT_CST_LT (maxval, n->high))
3210 maxval = n->high;
3212 /* A range counts double, since it requires two compares. */
3213 if (! tree_int_cst_equal (n->low, n->high))
3214 count++;
3216 /* Count the number of unique case node targets. */
3217 uniq++;
3218 lab = label_rtx (n->code_label);
3219 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
3220 if (same_case_target_p (label_rtx (m->code_label), lab))
3222 uniq--;
3223 break;
3227 /* Compute span of values. */
3228 if (count != 0)
3229 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
3231 if (count == 0)
3233 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
3234 emit_jump (default_label);
3237 /* Try implementing this switch statement by a short sequence of
3238 bit-wise comparisons. However, we let the binary-tree case
3239 below handle constant index expressions. */
3240 else if (CASE_USE_BIT_TESTS
3241 && ! TREE_CONSTANT (index_expr)
3242 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
3243 && compare_tree_int (range, 0) > 0
3244 && lshift_cheap_p ()
3245 && ((uniq == 1 && count >= 3)
3246 || (uniq == 2 && count >= 5)
3247 || (uniq == 3 && count >= 6)))
3249 /* Optimize the case where all the case values fit in a
3250 word without having to subtract MINVAL. In this case,
3251 we can optimize away the subtraction. */
3252 if (compare_tree_int (minval, 0) > 0
3253 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
3255 minval = integer_zero_node;
3256 range = maxval;
3258 emit_case_bit_tests (index_type, index_expr, minval, range,
3259 thiscase->data.case_stmt.case_list,
3260 default_label);
3263 /* If range of values is much bigger than number of values,
3264 make a sequence of conditional branches instead of a dispatch.
3265 If the switch-index is a constant, do it this way
3266 because we can optimize it. */
3268 else if (count < case_values_threshold ()
3269 || compare_tree_int (range,
3270 (optimize_size ? 3 : 10) * count) > 0
3271 /* RANGE may be signed, and really large ranges will show up
3272 as negative numbers. */
3273 || compare_tree_int (range, 0) < 0
3274 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
3275 || flag_pic
3276 #endif
3277 || TREE_CONSTANT (index_expr)
3278 /* If neither casesi or tablejump is available, we can
3279 only go this way. */
3280 || (!HAVE_casesi && !HAVE_tablejump))
3282 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3284 /* If the index is a short or char that we do not have
3285 an insn to handle comparisons directly, convert it to
3286 a full integer now, rather than letting each comparison
3287 generate the conversion. */
3289 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
3290 && ! have_insn_for (COMPARE, GET_MODE (index)))
3292 enum machine_mode wider_mode;
3293 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
3294 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
3295 if (have_insn_for (COMPARE, wider_mode))
3297 index = convert_to_mode (wider_mode, index, unsignedp);
3298 break;
3302 do_pending_stack_adjust ();
3304 if (MEM_P (index))
3305 index = copy_to_reg (index);
3306 if (GET_CODE (index) == CONST_INT
3307 || TREE_CODE (index_expr) == INTEGER_CST)
3309 /* Make a tree node with the proper constant value
3310 if we don't already have one. */
3311 if (TREE_CODE (index_expr) != INTEGER_CST)
3313 index_expr
3314 = build_int_2 (INTVAL (index),
3315 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
3316 index_expr = convert (index_type, index_expr);
3319 /* For constant index expressions we need only
3320 issue an unconditional branch to the appropriate
3321 target code. The job of removing any unreachable
3322 code is left to the optimization phase if the
3323 "-O" option is specified. */
3324 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3325 if (! tree_int_cst_lt (index_expr, n->low)
3326 && ! tree_int_cst_lt (n->high, index_expr))
3327 break;
3329 if (n)
3330 emit_jump (label_rtx (n->code_label));
3331 else
3332 emit_jump (default_label);
3334 else
3336 /* If the index expression is not constant we generate
3337 a binary decision tree to select the appropriate
3338 target code. This is done as follows:
3340 The list of cases is rearranged into a binary tree,
3341 nearly optimal assuming equal probability for each case.
3343 The tree is transformed into RTL, eliminating
3344 redundant test conditions at the same time.
3346 If program flow could reach the end of the
3347 decision tree an unconditional jump to the
3348 default code is emitted. */
3350 use_cost_table
3351 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
3352 && estimate_case_costs (thiscase->data.case_stmt.case_list));
3353 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
3354 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
3355 default_label, index_type);
3356 emit_jump (default_label);
3359 else
3361 table_label = gen_label_rtx ();
3362 if (! try_casesi (index_type, index_expr, minval, range,
3363 table_label, default_label))
3365 index_type = integer_type_node;
3367 /* Index jumptables from zero for suitable values of
3368 minval to avoid a subtraction. */
3369 if (! optimize_size
3370 && compare_tree_int (minval, 0) > 0
3371 && compare_tree_int (minval, 3) < 0)
3373 minval = integer_zero_node;
3374 range = maxval;
3377 if (! try_tablejump (index_type, index_expr, minval, range,
3378 table_label, default_label))
3379 abort ();
3382 /* Get table of labels to jump to, in order of case index. */
3384 ncases = tree_low_cst (range, 0) + 1;
3385 labelvec = alloca (ncases * sizeof (rtx));
3386 memset (labelvec, 0, ncases * sizeof (rtx));
3388 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3390 /* Compute the low and high bounds relative to the minimum
3391 value since that should fit in a HOST_WIDE_INT while the
3392 actual values may not. */
3393 HOST_WIDE_INT i_low
3394 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3395 n->low, minval)), 1);
3396 HOST_WIDE_INT i_high
3397 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3398 n->high, minval)), 1);
3399 HOST_WIDE_INT i;
3401 for (i = i_low; i <= i_high; i ++)
3402 labelvec[i]
3403 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
3406 /* Fill in the gaps with the default. */
3407 for (i = 0; i < ncases; i++)
3408 if (labelvec[i] == 0)
3409 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
3411 /* Output the table. */
3412 emit_label (table_label);
3414 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
3415 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
3416 gen_rtx_LABEL_REF (Pmode, table_label),
3417 gen_rtvec_v (ncases, labelvec),
3418 const0_rtx, const0_rtx));
3419 else
3420 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
3421 gen_rtvec_v (ncases, labelvec)));
3423 /* If the case insn drops through the table,
3424 after the table we must jump to the default-label.
3425 Otherwise record no drop-through after the table. */
3426 #ifdef CASE_DROPS_THROUGH
3427 emit_jump (default_label);
3428 #else
3429 emit_barrier ();
3430 #endif
3433 before_case = NEXT_INSN (before_case);
3434 end = get_last_insn ();
3435 if (squeeze_notes (&before_case, &end))
3436 abort ();
3437 reorder_insns (before_case, end,
3438 thiscase->data.case_stmt.start);
3441 if (thiscase->exit_label && !exit_done)
3442 emit_label (thiscase->exit_label);
3444 POPSTACK (case_stack);
3446 free_temp_slots ();
3449 /* Convert the tree NODE into a list linked by the right field, with the left
3450 field zeroed. RIGHT is used for recursion; it is a list to be placed
3451 rightmost in the resulting list. */
3453 static struct case_node *
3454 case_tree2list (struct case_node *node, struct case_node *right)
3456 struct case_node *left;
3458 if (node->right)
3459 right = case_tree2list (node->right, right);
3461 node->right = right;
3462 if ((left = node->left))
3464 node->left = 0;
3465 return case_tree2list (left, node);
3468 return node;
3471 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
3473 static void
3474 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
3476 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
3478 if (op1 == op2)
3479 emit_jump (label);
3481 else
3482 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
3483 (GET_MODE (op1) == VOIDmode
3484 ? GET_MODE (op2) : GET_MODE (op1)),
3485 unsignedp, label);
3488 /* Not all case values are encountered equally. This function
3489 uses a heuristic to weight case labels, in cases where that
3490 looks like a reasonable thing to do.
3492 Right now, all we try to guess is text, and we establish the
3493 following weights:
3495 chars above space: 16
3496 digits: 16
3497 default: 12
3498 space, punct: 8
3499 tab: 4
3500 newline: 2
3501 other "\" chars: 1
3502 remaining chars: 0
3504 If we find any cases in the switch that are not either -1 or in the range
3505 of valid ASCII characters, or are control characters other than those
3506 commonly used with "\", don't treat this switch scanning text.
3508 Return 1 if these nodes are suitable for cost estimation, otherwise
3509 return 0. */
3511 static int
3512 estimate_case_costs (case_node_ptr node)
3514 tree min_ascii = integer_minus_one_node;
3515 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
3516 case_node_ptr n;
3517 int i;
3519 /* If we haven't already made the cost table, make it now. Note that the
3520 lower bound of the table is -1, not zero. */
3522 if (! cost_table_initialized)
3524 cost_table_initialized = 1;
3526 for (i = 0; i < 128; i++)
3528 if (ISALNUM (i))
3529 COST_TABLE (i) = 16;
3530 else if (ISPUNCT (i))
3531 COST_TABLE (i) = 8;
3532 else if (ISCNTRL (i))
3533 COST_TABLE (i) = -1;
3536 COST_TABLE (' ') = 8;
3537 COST_TABLE ('\t') = 4;
3538 COST_TABLE ('\0') = 4;
3539 COST_TABLE ('\n') = 2;
3540 COST_TABLE ('\f') = 1;
3541 COST_TABLE ('\v') = 1;
3542 COST_TABLE ('\b') = 1;
3545 /* See if all the case expressions look like text. It is text if the
3546 constant is >= -1 and the highest constant is <= 127. Do all comparisons
3547 as signed arithmetic since we don't want to ever access cost_table with a
3548 value less than -1. Also check that none of the constants in a range
3549 are strange control characters. */
3551 for (n = node; n; n = n->right)
3553 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
3554 return 0;
3556 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
3557 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
3558 if (COST_TABLE (i) < 0)
3559 return 0;
3562 /* All interesting values are within the range of interesting
3563 ASCII characters. */
3564 return 1;
3567 /* Determine whether two case labels branch to the same target.
3568 Since we now do tree optimizations, just comparing labels is
3569 good enough. */
3571 static bool
3572 same_case_target_p (rtx l1, rtx l2)
3574 return l1 == l2;
3577 /* Take an ordered list of case nodes
3578 and transform them into a near optimal binary tree,
3579 on the assumption that any target code selection value is as
3580 likely as any other.
3582 The transformation is performed by splitting the ordered
3583 list into two equal sections plus a pivot. The parts are
3584 then attached to the pivot as left and right branches. Each
3585 branch is then transformed recursively. */
3587 static void
3588 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
3590 case_node_ptr np;
3592 np = *head;
3593 if (np)
3595 int cost = 0;
3596 int i = 0;
3597 int ranges = 0;
3598 case_node_ptr *npp;
3599 case_node_ptr left;
3601 /* Count the number of entries on branch. Also count the ranges. */
3603 while (np)
3605 if (!tree_int_cst_equal (np->low, np->high))
3607 ranges++;
3608 if (use_cost_table)
3609 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
3612 if (use_cost_table)
3613 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
3615 i++;
3616 np = np->right;
3619 if (i > 2)
3621 /* Split this list if it is long enough for that to help. */
3622 npp = head;
3623 left = *npp;
3624 if (use_cost_table)
3626 /* Find the place in the list that bisects the list's total cost,
3627 Here I gets half the total cost. */
3628 int n_moved = 0;
3629 i = (cost + 1) / 2;
3630 while (1)
3632 /* Skip nodes while their cost does not reach that amount. */
3633 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
3634 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
3635 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
3636 if (i <= 0)
3637 break;
3638 npp = &(*npp)->right;
3639 n_moved += 1;
3641 if (n_moved == 0)
3643 /* Leave this branch lopsided, but optimize left-hand
3644 side and fill in `parent' fields for right-hand side. */
3645 np = *head;
3646 np->parent = parent;
3647 balance_case_nodes (&np->left, np);
3648 for (; np->right; np = np->right)
3649 np->right->parent = np;
3650 return;
3653 /* If there are just three nodes, split at the middle one. */
3654 else if (i == 3)
3655 npp = &(*npp)->right;
3656 else
3658 /* Find the place in the list that bisects the list's total cost,
3659 where ranges count as 2.
3660 Here I gets half the total cost. */
3661 i = (i + ranges + 1) / 2;
3662 while (1)
3664 /* Skip nodes while their cost does not reach that amount. */
3665 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
3666 i--;
3667 i--;
3668 if (i <= 0)
3669 break;
3670 npp = &(*npp)->right;
3673 *head = np = *npp;
3674 *npp = 0;
3675 np->parent = parent;
3676 np->left = left;
3678 /* Optimize each of the two split parts. */
3679 balance_case_nodes (&np->left, np);
3680 balance_case_nodes (&np->right, np);
3682 else
3684 /* Else leave this branch as one level,
3685 but fill in `parent' fields. */
3686 np = *head;
3687 np->parent = parent;
3688 for (; np->right; np = np->right)
3689 np->right->parent = np;
3694 /* Search the parent sections of the case node tree
3695 to see if a test for the lower bound of NODE would be redundant.
3696 INDEX_TYPE is the type of the index expression.
3698 The instructions to generate the case decision tree are
3699 output in the same order as nodes are processed so it is
3700 known that if a parent node checks the range of the current
3701 node minus one that the current node is bounded at its lower
3702 span. Thus the test would be redundant. */
3704 static int
3705 node_has_low_bound (case_node_ptr node, tree index_type)
3707 tree low_minus_one;
3708 case_node_ptr pnode;
3710 /* If the lower bound of this node is the lowest value in the index type,
3711 we need not test it. */
3713 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
3714 return 1;
3716 /* If this node has a left branch, the value at the left must be less
3717 than that at this node, so it cannot be bounded at the bottom and
3718 we need not bother testing any further. */
3720 if (node->left)
3721 return 0;
3723 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
3724 node->low, integer_one_node));
3726 /* If the subtraction above overflowed, we can't verify anything.
3727 Otherwise, look for a parent that tests our value - 1. */
3729 if (! tree_int_cst_lt (low_minus_one, node->low))
3730 return 0;
3732 for (pnode = node->parent; pnode; pnode = pnode->parent)
3733 if (tree_int_cst_equal (low_minus_one, pnode->high))
3734 return 1;
3736 return 0;
3739 /* Search the parent sections of the case node tree
3740 to see if a test for the upper bound of NODE would be redundant.
3741 INDEX_TYPE is the type of the index expression.
3743 The instructions to generate the case decision tree are
3744 output in the same order as nodes are processed so it is
3745 known that if a parent node checks the range of the current
3746 node plus one that the current node is bounded at its upper
3747 span. Thus the test would be redundant. */
3749 static int
3750 node_has_high_bound (case_node_ptr node, tree index_type)
3752 tree high_plus_one;
3753 case_node_ptr pnode;
3755 /* If there is no upper bound, obviously no test is needed. */
3757 if (TYPE_MAX_VALUE (index_type) == NULL)
3758 return 1;
3760 /* If the upper bound of this node is the highest value in the type
3761 of the index expression, we need not test against it. */
3763 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
3764 return 1;
3766 /* If this node has a right branch, the value at the right must be greater
3767 than that at this node, so it cannot be bounded at the top and
3768 we need not bother testing any further. */
3770 if (node->right)
3771 return 0;
3773 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
3774 node->high, integer_one_node));
3776 /* If the addition above overflowed, we can't verify anything.
3777 Otherwise, look for a parent that tests our value + 1. */
3779 if (! tree_int_cst_lt (node->high, high_plus_one))
3780 return 0;
3782 for (pnode = node->parent; pnode; pnode = pnode->parent)
3783 if (tree_int_cst_equal (high_plus_one, pnode->low))
3784 return 1;
3786 return 0;
3789 /* Search the parent sections of the
3790 case node tree to see if both tests for the upper and lower
3791 bounds of NODE would be redundant. */
3793 static int
3794 node_is_bounded (case_node_ptr node, tree index_type)
3796 return (node_has_low_bound (node, index_type)
3797 && node_has_high_bound (node, index_type));
3800 /* Emit step-by-step code to select a case for the value of INDEX.
3801 The thus generated decision tree follows the form of the
3802 case-node binary tree NODE, whose nodes represent test conditions.
3803 INDEX_TYPE is the type of the index of the switch.
3805 Care is taken to prune redundant tests from the decision tree
3806 by detecting any boundary conditions already checked by
3807 emitted rtx. (See node_has_high_bound, node_has_low_bound
3808 and node_is_bounded, above.)
3810 Where the test conditions can be shown to be redundant we emit
3811 an unconditional jump to the target code. As a further
3812 optimization, the subordinates of a tree node are examined to
3813 check for bounded nodes. In this case conditional and/or
3814 unconditional jumps as a result of the boundary check for the
3815 current node are arranged to target the subordinates associated
3816 code for out of bound conditions on the current node.
3818 We can assume that when control reaches the code generated here,
3819 the index value has already been compared with the parents
3820 of this node, and determined to be on the same side of each parent
3821 as this node is. Thus, if this node tests for the value 51,
3822 and a parent tested for 52, we don't need to consider
3823 the possibility of a value greater than 51. If another parent
3824 tests for the value 50, then this node need not test anything. */
3826 static void
3827 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
3828 tree index_type)
3830 /* If INDEX has an unsigned type, we must make unsigned branches. */
3831 int unsignedp = TYPE_UNSIGNED (index_type);
3832 enum machine_mode mode = GET_MODE (index);
3833 enum machine_mode imode = TYPE_MODE (index_type);
3835 /* See if our parents have already tested everything for us.
3836 If they have, emit an unconditional jump for this node. */
3837 if (node_is_bounded (node, index_type))
3838 emit_jump (label_rtx (node->code_label));
3840 else if (tree_int_cst_equal (node->low, node->high))
3842 /* Node is single valued. First see if the index expression matches
3843 this node and then check our children, if any. */
3845 do_jump_if_equal (index,
3846 convert_modes (mode, imode,
3847 expand_expr (node->low, NULL_RTX,
3848 VOIDmode, 0),
3849 unsignedp),
3850 label_rtx (node->code_label), unsignedp);
3852 if (node->right != 0 && node->left != 0)
3854 /* This node has children on both sides.
3855 Dispatch to one side or the other
3856 by comparing the index value with this node's value.
3857 If one subtree is bounded, check that one first,
3858 so we can avoid real branches in the tree. */
3860 if (node_is_bounded (node->right, index_type))
3862 emit_cmp_and_jump_insns (index,
3863 convert_modes
3864 (mode, imode,
3865 expand_expr (node->high, NULL_RTX,
3866 VOIDmode, 0),
3867 unsignedp),
3868 GT, NULL_RTX, mode, unsignedp,
3869 label_rtx (node->right->code_label));
3870 emit_case_nodes (index, node->left, default_label, index_type);
3873 else if (node_is_bounded (node->left, index_type))
3875 emit_cmp_and_jump_insns (index,
3876 convert_modes
3877 (mode, imode,
3878 expand_expr (node->high, NULL_RTX,
3879 VOIDmode, 0),
3880 unsignedp),
3881 LT, NULL_RTX, mode, unsignedp,
3882 label_rtx (node->left->code_label));
3883 emit_case_nodes (index, node->right, default_label, index_type);
3886 /* If both children are single-valued cases with no
3887 children, finish up all the work. This way, we can save
3888 one ordered comparison. */
3889 else if (tree_int_cst_equal (node->right->low, node->right->high)
3890 && node->right->left == 0
3891 && node->right->right == 0
3892 && tree_int_cst_equal (node->left->low, node->left->high)
3893 && node->left->left == 0
3894 && node->left->right == 0)
3896 /* Neither node is bounded. First distinguish the two sides;
3897 then emit the code for one side at a time. */
3899 /* See if the value matches what the right hand side
3900 wants. */
3901 do_jump_if_equal (index,
3902 convert_modes (mode, imode,
3903 expand_expr (node->right->low,
3904 NULL_RTX,
3905 VOIDmode, 0),
3906 unsignedp),
3907 label_rtx (node->right->code_label),
3908 unsignedp);
3910 /* See if the value matches what the left hand side
3911 wants. */
3912 do_jump_if_equal (index,
3913 convert_modes (mode, imode,
3914 expand_expr (node->left->low,
3915 NULL_RTX,
3916 VOIDmode, 0),
3917 unsignedp),
3918 label_rtx (node->left->code_label),
3919 unsignedp);
3922 else
3924 /* Neither node is bounded. First distinguish the two sides;
3925 then emit the code for one side at a time. */
3927 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3929 /* See if the value is on the right. */
3930 emit_cmp_and_jump_insns (index,
3931 convert_modes
3932 (mode, imode,
3933 expand_expr (node->high, NULL_RTX,
3934 VOIDmode, 0),
3935 unsignedp),
3936 GT, NULL_RTX, mode, unsignedp,
3937 label_rtx (test_label));
3939 /* Value must be on the left.
3940 Handle the left-hand subtree. */
3941 emit_case_nodes (index, node->left, default_label, index_type);
3942 /* If left-hand subtree does nothing,
3943 go to default. */
3944 emit_jump (default_label);
3946 /* Code branches here for the right-hand subtree. */
3947 expand_label (test_label);
3948 emit_case_nodes (index, node->right, default_label, index_type);
3952 else if (node->right != 0 && node->left == 0)
3954 /* Here we have a right child but no left so we issue conditional
3955 branch to default and process the right child.
3957 Omit the conditional branch to default if we it avoid only one
3958 right child; it costs too much space to save so little time. */
3960 if (node->right->right || node->right->left
3961 || !tree_int_cst_equal (node->right->low, node->right->high))
3963 if (!node_has_low_bound (node, index_type))
3965 emit_cmp_and_jump_insns (index,
3966 convert_modes
3967 (mode, imode,
3968 expand_expr (node->high, NULL_RTX,
3969 VOIDmode, 0),
3970 unsignedp),
3971 LT, NULL_RTX, mode, unsignedp,
3972 default_label);
3975 emit_case_nodes (index, node->right, default_label, index_type);
3977 else
3978 /* We cannot process node->right normally
3979 since we haven't ruled out the numbers less than
3980 this node's value. So handle node->right explicitly. */
3981 do_jump_if_equal (index,
3982 convert_modes
3983 (mode, imode,
3984 expand_expr (node->right->low, NULL_RTX,
3985 VOIDmode, 0),
3986 unsignedp),
3987 label_rtx (node->right->code_label), unsignedp);
3990 else if (node->right == 0 && node->left != 0)
3992 /* Just one subtree, on the left. */
3993 if (node->left->left || node->left->right
3994 || !tree_int_cst_equal (node->left->low, node->left->high))
3996 if (!node_has_high_bound (node, index_type))
3998 emit_cmp_and_jump_insns (index,
3999 convert_modes
4000 (mode, imode,
4001 expand_expr (node->high, NULL_RTX,
4002 VOIDmode, 0),
4003 unsignedp),
4004 GT, NULL_RTX, mode, unsignedp,
4005 default_label);
4008 emit_case_nodes (index, node->left, default_label, index_type);
4010 else
4011 /* We cannot process node->left normally
4012 since we haven't ruled out the numbers less than
4013 this node's value. So handle node->left explicitly. */
4014 do_jump_if_equal (index,
4015 convert_modes
4016 (mode, imode,
4017 expand_expr (node->left->low, NULL_RTX,
4018 VOIDmode, 0),
4019 unsignedp),
4020 label_rtx (node->left->code_label), unsignedp);
4023 else
4025 /* Node is a range. These cases are very similar to those for a single
4026 value, except that we do not start by testing whether this node
4027 is the one to branch to. */
4029 if (node->right != 0 && node->left != 0)
4031 /* Node has subtrees on both sides.
4032 If the right-hand subtree is bounded,
4033 test for it first, since we can go straight there.
4034 Otherwise, we need to make a branch in the control structure,
4035 then handle the two subtrees. */
4036 tree test_label = 0;
4038 if (node_is_bounded (node->right, index_type))
4039 /* Right hand node is fully bounded so we can eliminate any
4040 testing and branch directly to the target code. */
4041 emit_cmp_and_jump_insns (index,
4042 convert_modes
4043 (mode, imode,
4044 expand_expr (node->high, NULL_RTX,
4045 VOIDmode, 0),
4046 unsignedp),
4047 GT, NULL_RTX, mode, unsignedp,
4048 label_rtx (node->right->code_label));
4049 else
4051 /* Right hand node requires testing.
4052 Branch to a label where we will handle it later. */
4054 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4055 emit_cmp_and_jump_insns (index,
4056 convert_modes
4057 (mode, imode,
4058 expand_expr (node->high, NULL_RTX,
4059 VOIDmode, 0),
4060 unsignedp),
4061 GT, NULL_RTX, mode, unsignedp,
4062 label_rtx (test_label));
4065 /* Value belongs to this node or to the left-hand subtree. */
4067 emit_cmp_and_jump_insns (index,
4068 convert_modes
4069 (mode, imode,
4070 expand_expr (node->low, NULL_RTX,
4071 VOIDmode, 0),
4072 unsignedp),
4073 GE, NULL_RTX, mode, unsignedp,
4074 label_rtx (node->code_label));
4076 /* Handle the left-hand subtree. */
4077 emit_case_nodes (index, node->left, default_label, index_type);
4079 /* If right node had to be handled later, do that now. */
4081 if (test_label)
4083 /* If the left-hand subtree fell through,
4084 don't let it fall into the right-hand subtree. */
4085 emit_jump (default_label);
4087 expand_label (test_label);
4088 emit_case_nodes (index, node->right, default_label, index_type);
4092 else if (node->right != 0 && node->left == 0)
4094 /* Deal with values to the left of this node,
4095 if they are possible. */
4096 if (!node_has_low_bound (node, index_type))
4098 emit_cmp_and_jump_insns (index,
4099 convert_modes
4100 (mode, imode,
4101 expand_expr (node->low, NULL_RTX,
4102 VOIDmode, 0),
4103 unsignedp),
4104 LT, NULL_RTX, mode, unsignedp,
4105 default_label);
4108 /* Value belongs to this node or to the right-hand subtree. */
4110 emit_cmp_and_jump_insns (index,
4111 convert_modes
4112 (mode, imode,
4113 expand_expr (node->high, NULL_RTX,
4114 VOIDmode, 0),
4115 unsignedp),
4116 LE, NULL_RTX, mode, unsignedp,
4117 label_rtx (node->code_label));
4119 emit_case_nodes (index, node->right, default_label, index_type);
4122 else if (node->right == 0 && node->left != 0)
4124 /* Deal with values to the right of this node,
4125 if they are possible. */
4126 if (!node_has_high_bound (node, index_type))
4128 emit_cmp_and_jump_insns (index,
4129 convert_modes
4130 (mode, imode,
4131 expand_expr (node->high, NULL_RTX,
4132 VOIDmode, 0),
4133 unsignedp),
4134 GT, NULL_RTX, mode, unsignedp,
4135 default_label);
4138 /* Value belongs to this node or to the left-hand subtree. */
4140 emit_cmp_and_jump_insns (index,
4141 convert_modes
4142 (mode, imode,
4143 expand_expr (node->low, NULL_RTX,
4144 VOIDmode, 0),
4145 unsignedp),
4146 GE, NULL_RTX, mode, unsignedp,
4147 label_rtx (node->code_label));
4149 emit_case_nodes (index, node->left, default_label, index_type);
4152 else
4154 /* Node has no children so we check low and high bounds to remove
4155 redundant tests. Only one of the bounds can exist,
4156 since otherwise this node is bounded--a case tested already. */
4157 int high_bound = node_has_high_bound (node, index_type);
4158 int low_bound = node_has_low_bound (node, index_type);
4160 if (!high_bound && low_bound)
4162 emit_cmp_and_jump_insns (index,
4163 convert_modes
4164 (mode, imode,
4165 expand_expr (node->high, NULL_RTX,
4166 VOIDmode, 0),
4167 unsignedp),
4168 GT, NULL_RTX, mode, unsignedp,
4169 default_label);
4172 else if (!low_bound && high_bound)
4174 emit_cmp_and_jump_insns (index,
4175 convert_modes
4176 (mode, imode,
4177 expand_expr (node->low, NULL_RTX,
4178 VOIDmode, 0),
4179 unsignedp),
4180 LT, NULL_RTX, mode, unsignedp,
4181 default_label);
4183 else if (!low_bound && !high_bound)
4185 /* Widen LOW and HIGH to the same width as INDEX. */
4186 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
4187 tree low = build1 (CONVERT_EXPR, type, node->low);
4188 tree high = build1 (CONVERT_EXPR, type, node->high);
4189 rtx low_rtx, new_index, new_bound;
4191 /* Instead of doing two branches, emit one unsigned branch for
4192 (index-low) > (high-low). */
4193 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
4194 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
4195 NULL_RTX, unsignedp,
4196 OPTAB_WIDEN);
4197 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
4198 high, low)),
4199 NULL_RTX, mode, 0);
4201 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
4202 mode, 1, default_label);
4205 emit_jump (label_rtx (node->code_label));
4210 #include "gt-stmt.h"