1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
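/* A purely illustrative sketch (not taken from any front end): a parser
   expanding `if (cond) then_stmt;' might drive these hooks as

       expand_start_cond (cond, 0);    -- emit test, jump past then-clause
       ... expand RTL for then_stmt ...
       expand_end_cond ();             -- emit the label jumped to above

   with expand_start_else called in between when an `else' clause follows.  */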
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "tm_p.h"
44 #include "flags.h"
45 #include "except.h"
46 #include "function.h"
47 #include "insn-config.h"
48 #include "expr.h"
49 #include "libfuncs.h"
50 #include "hard-reg-set.h"
51 #include "loop.h"
52 #include "recog.h"
53 #include "machmode.h"
54 #include "toplev.h"
55 #include "output.h"
56 #include "ggc.h"
57 #include "langhooks.h"
58 #include "predict.h"
59 #include "optabs.h"
60 #include "target.h"
61 #include "regs.h"
63 /* Functions and data structures for expanding case statements. */
65 /* Case label structure, used to hold info on labels within case
66 statements. We handle "range" labels; for a single-value label
67 as in C, the high and low limits are the same.
69 We start with a vector of case nodes sorted in ascending order, and
70 the default label as the last element in the vector. Before expanding
71 to RTL, we transform this vector into a list linked via the RIGHT
72 fields in the case_node struct. Nodes with higher case values are
73 later in the list.
75 Switch statements can be output in three forms. A branch table is
76 used if there are more than a few labels and the labels are dense
77 within the range between the smallest and largest case value. If a
78 branch table is used, no further manipulations are done with the case
79 node chain.
81 The alternative to the use of a branch table is to generate a series
82 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
83 and PARENT fields to hold a binary tree. Initially the tree is
84 totally unbalanced, with everything on the right. We balance the tree
85 with nodes on the left having lower case values than the parent
86 and nodes on the right having higher values. We then output the tree
87 in order.
89 For very small, suitable switch statements, we can generate a series
90 of simple bit test and branches instead. */
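/* As an illustrative sketch (not generated output), a dense switch such as

       switch (x) { case 1: ... case 2: ... case 3: ... default: ... }

   would typically be dispatched through a branch table indexed by the case
   value minus the smallest case value, whereas a sparse one (say case 1 and
   case 1000 only) would instead become a balanced tree of compare-and-jump
   insns on the case values.  */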
92 struct case_node GTY(())
94 struct case_node *left; /* Left son in binary tree */
95 struct case_node *right; /* Right son in binary tree; also node chain */
96 struct case_node *parent; /* Parent of node in binary tree */
97 tree low; /* Lowest index value for this label */
98 tree high; /* Highest index value for this label */
99 tree code_label; /* Label to jump to when node matches */
102 typedef struct case_node case_node;
103 typedef struct case_node *case_node_ptr;
105 /* These are used by estimate_case_costs and balance_case_nodes. */
107 /* This must be a signed type, and non-ANSI compilers lack signed char. */
108 static short cost_table_[129];
109 static int use_cost_table;
110 static int cost_table_initialized;
112 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
113 is unsigned. */
114 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
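/* For example, COST_TABLE (-1) refers to cost_table_[0], so the macro
   accepts any index in the range -1 .. 127.  */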
116 /* Stack of control and binding constructs we are currently inside.
118 These constructs begin when you call `expand_start_WHATEVER'
119 and end when you call `expand_end_WHATEVER'. This stack records
120 info about how the construct began that tells the end-function
121 what to do. It also may provide information about the construct
122 to alter the behavior of other constructs within the body.
123 For example, they may affect the behavior of C `break' and `continue'.
125 Each construct gets one `struct nesting' object.
126 All of these objects are chained through the `all' field.
127 `nesting_stack' points to the first object (innermost construct).
128 The position of an entry on `nesting_stack' is in its `depth' field.
130 Each type of construct has its own individual stack.
  131      For example, conditionals have `cond_stack'.  Each object points to the
132 next object of the same type through the `next' field.
134 Some constructs are visible to `break' exit-statements and others
135 are not. Which constructs are visible depends on the language.
136 Therefore, the data structure allows each construct to be visible
137 or not, according to the args given when the construct is started.
138 The construct is visible if the `exit_label' field is non-null.
139 In that case, the value should be a CODE_LABEL rtx. */
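/* A purely illustrative example: while expanding

       if (a)
         switch (x) { ... }

   cond_stack holds the entry for the `if', case_stack the entry for the
   `switch', and nesting_stack lists the `switch' entry first (innermost)
   followed by the `if' entry, linked through the `all' field.  */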
141 struct nesting GTY(())
143 struct nesting *all;
144 struct nesting *next;
145 int depth;
146 rtx exit_label;
147 enum nesting_desc {
148 COND_NESTING,
149 BLOCK_NESTING,
150 CASE_NESTING
151 } desc;
152 union nesting_u
154 /* For conds (if-then and if-then-else statements). */
155 struct nesting_cond
157 /* Label for the end of the if construct.
158 There is none if EXITFLAG was not set
159 and no `else' has been seen yet. */
160 rtx endif_label;
161 /* Label for the end of this alternative.
162 This may be the end of the if or the next else/elseif. */
163 rtx next_label;
164 } GTY ((tag ("COND_NESTING"))) cond;
165 /* For switch (C) or case (Pascal) statements. */
166 struct nesting_case
168 /* The insn after which the case dispatch should finally
169 be emitted. Zero for a dummy. */
170 rtx start;
171 /* A list of case labels; it is first built as an AVL tree.
172 During expand_end_case, this is converted to a list, and may be
173 rearranged into a nearly balanced binary tree. */
174 struct case_node *case_list;
175 /* Label to jump to if no case matches. */
176 tree default_label;
177 /* The expression to be dispatched on. */
178 tree index_expr;
179 } GTY ((tag ("CASE_NESTING"))) case_stmt;
180 } GTY ((desc ("%1.desc"))) data;
183 /* Allocate and return a new `struct nesting'. */
185 #define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))
187 /* Pop the nesting stack element by element until we pop off
188 the element which is at the top of STACK.
189 Update all the other stacks, popping off elements from them
190 as we pop them from nesting_stack. */
192 #define POPSTACK(STACK) \
193 do { struct nesting *target = STACK; \
194 struct nesting *this; \
195 do { this = nesting_stack; \
196 if (cond_stack == this) \
197 cond_stack = cond_stack->next; \
198 if (case_stack == this) \
199 case_stack = case_stack->next; \
200 nesting_depth = nesting_stack->depth - 1; \
201 nesting_stack = this->all; } \
202 while (this != target); } while (0)
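/* For instance, expand_end_cond below finishes an `if' with
   POPSTACK (cond_stack), which unwinds nesting_stack entry by entry
   (keeping cond_stack and case_stack in step) until the innermost
   conditional has been popped.  */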
205 struct stmt_status GTY(())
207 /* If any new stacks are added here, add them to POPSTACKS too. */
209 /* Chain of all pending conditional statements. */
210 struct nesting * x_cond_stack;
212 /* Chain of all pending case or switch statements. */
213 struct nesting * x_case_stack;
215 /* Separate chain including all of the above,
216 chained through the `all' field. */
217 struct nesting * x_nesting_stack;
219 /* Number of entries on nesting_stack now. */
220 int x_nesting_depth;
222 /* Location of last line-number note, whether we actually
223 emitted it or not. */
224 location_t x_emit_locus;
227 #define cond_stack (cfun->stmt->x_cond_stack)
228 #define case_stack (cfun->stmt->x_case_stack)
229 #define nesting_stack (cfun->stmt->x_nesting_stack)
230 #define nesting_depth (cfun->stmt->x_nesting_depth)
231 #define emit_locus (cfun->stmt->x_emit_locus)
233 static int n_occurrences (int, const char *);
234 static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
235 static void expand_nl_goto_receiver (void);
236 static bool check_operand_nalternatives (tree, tree);
237 static bool check_unique_operand_names (tree, tree);
238 static char *resolve_operand_name_1 (char *, tree, tree);
239 static void expand_null_return_1 (void);
240 static rtx shift_return_value (rtx);
241 static void expand_value_return (rtx);
242 static void do_jump_if_equal (rtx, rtx, rtx, int);
243 static int estimate_case_costs (case_node_ptr);
244 static bool same_case_target_p (rtx, rtx);
245 static bool lshift_cheap_p (void);
246 static int case_bit_test_cmp (const void *, const void *);
247 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
248 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
249 static int node_has_low_bound (case_node_ptr, tree);
250 static int node_has_high_bound (case_node_ptr, tree);
251 static int node_is_bounded (case_node_ptr, tree);
252 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
254 void
255 init_stmt_for_function (void)
257 cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
260 /* Record the current file and line. Called from emit_line_note. */
262 void
263 set_file_and_line_for_stmt (location_t location)
265 /* If we're outputting an inline function, and we add a line note,
266 there may be no CFUN->STMT information. So, there's no need to
267 update it. */
268 if (cfun->stmt)
269 emit_locus = location;
272 /* Emit a no-op instruction. */
274 void
275 emit_nop (void)
277 rtx last_insn;
279 last_insn = get_last_insn ();
280 if (!optimize
281 && (LABEL_P (last_insn)
282 || (NOTE_P (last_insn)
283 && prev_real_insn (last_insn) == 0)))
284 emit_insn (gen_nop ());
287 /* Return the rtx-label that corresponds to a LABEL_DECL,
288 creating it if necessary. */
291 label_rtx (tree label)
293 if (TREE_CODE (label) != LABEL_DECL)
294 abort ();
296 if (!DECL_RTL_SET_P (label))
298 rtx r = gen_label_rtx ();
299 SET_DECL_RTL (label, r);
300 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
301 LABEL_PRESERVE_P (r) = 1;
304 return DECL_RTL (label);
307 /* As above, but also put it on the forced-reference list of the
308 function that contains it. */
310 force_label_rtx (tree label)
312 rtx ref = label_rtx (label);
313 tree function = decl_function_context (label);
314 struct function *p;
316 if (!function)
317 abort ();
319 if (function != current_function_decl)
320 p = find_function_data (function);
321 else
322 p = cfun;
324 p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
325 p->expr->x_forced_labels);
326 return ref;
329 /* Add an unconditional jump to LABEL as the next sequential instruction. */
331 void
332 emit_jump (rtx label)
334 do_pending_stack_adjust ();
335 emit_jump_insn (gen_jump (label));
336 emit_barrier ();
339 /* Emit code to jump to the address
340 specified by the pointer expression EXP. */
342 void
343 expand_computed_goto (tree exp)
345 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
347 x = convert_memory_address (Pmode, x);
349 do_pending_stack_adjust ();
350 emit_indirect_jump (x);
353 /* Handle goto statements and the labels that they can go to. */
355 /* Specify the location in the RTL code of a label LABEL,
356 which is a LABEL_DECL tree node.
358 This is used for the kind of label that the user can jump to with a
359 goto statement, and for alternatives of a switch or case statement.
360 RTL labels generated for loops and conditionals don't go through here;
361 they are generated directly at the RTL level, by other functions below.
363 Note that this has nothing to do with defining label *names*.
364 Languages vary in how they do that and what that even means. */
366 void
367 expand_label (tree label)
369 rtx label_r = label_rtx (label);
371 do_pending_stack_adjust ();
372 emit_label (label_r);
373 if (DECL_NAME (label))
374 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
376 if (DECL_NONLOCAL (label))
378 expand_nl_goto_receiver ();
379 nonlocal_goto_handler_labels
380 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
381 nonlocal_goto_handler_labels);
384 if (FORCED_LABEL (label))
385 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
387 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
388 maybe_set_first_label_num (label_r);
391 /* Generate RTL code for a `goto' statement with target label LABEL.
392 LABEL should be a LABEL_DECL tree node that was or will later be
393 defined with `expand_label'. */
395 void
396 expand_goto (tree label)
398 #ifdef ENABLE_CHECKING
399 /* Check for a nonlocal goto to a containing function. Should have
400 gotten translated to __builtin_nonlocal_goto. */
401 tree context = decl_function_context (label);
402 if (context != 0 && context != current_function_decl)
403 abort ();
404 #endif
406 emit_jump (label_rtx (label));
409 /* Return the number of times character C occurs in string S. */
410 static int
411 n_occurrences (int c, const char *s)
413 int n = 0;
414 while (*s)
415 n += (*s++ == c);
416 return n;
419 /* Generate RTL for an asm statement (explicit assembler code).
420 STRING is a STRING_CST node containing the assembler code text,
421 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
422 insn is volatile; don't optimize it. */
424 void
425 expand_asm (tree string, int vol)
427 rtx body;
429 if (TREE_CODE (string) == ADDR_EXPR)
430 string = TREE_OPERAND (string, 0);
432 body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));
434 MEM_VOLATILE_P (body) = vol;
436 emit_insn (body);
439 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
440 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
441 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
442 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
443 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
444 constraint allows the use of a register operand. And, *IS_INOUT
445 will be true if the operand is read-write, i.e., if it is used as
446 an input as well as an output. If *CONSTRAINT_P is not in
447 canonical form, it will be made canonical. (Note that `+' will be
448 replaced with `=' as part of this process.)
450 Returns TRUE if all went well; FALSE if an error occurred. */
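/* Illustrative examples of the canonicalization described above (the
   constraint strings are hypothetical, not tied to any target):

       "=r"   ->  "=r"    *allows_reg = true,  *is_inout = false
       "+rm"  ->  "=rm"   *allows_reg = *allows_mem = true, *is_inout = true
       "r="   ->  "=r"    plus a warning that `=' is not at the beginning.  */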
452 bool
453 parse_output_constraint (const char **constraint_p, int operand_num,
454 int ninputs, int noutputs, bool *allows_mem,
455 bool *allows_reg, bool *is_inout)
457 const char *constraint = *constraint_p;
458 const char *p;
460 /* Assume the constraint doesn't allow the use of either a register
461 or memory. */
462 *allows_mem = false;
463 *allows_reg = false;
465 /* Allow the `=' or `+' to not be at the beginning of the string,
466 since it wasn't explicitly documented that way, and there is a
467 large body of code that puts it last. Swap the character to
468 the front, so as not to uglify any place else. */
469 p = strchr (constraint, '=');
470 if (!p)
471 p = strchr (constraint, '+');
473 /* If the string doesn't contain an `=', issue an error
474 message. */
475 if (!p)
477 error ("output operand constraint lacks `='");
478 return false;
481 /* If the constraint begins with `+', then the operand is both read
482 from and written to. */
483 *is_inout = (*p == '+');
485 /* Canonicalize the output constraint so that it begins with `='. */
486 if (p != constraint || is_inout)
488 char *buf;
489 size_t c_len = strlen (constraint);
491 if (p != constraint)
492 warning ("output constraint `%c' for operand %d is not at the beginning",
493 *p, operand_num);
495 /* Make a copy of the constraint. */
496 buf = alloca (c_len + 1);
497 strcpy (buf, constraint);
498 /* Swap the first character and the `=' or `+'. */
499 buf[p - constraint] = buf[0];
500 /* Make sure the first character is an `='. (Until we do this,
501 it might be a `+'.) */
502 buf[0] = '=';
503 /* Replace the constraint with the canonicalized string. */
504 *constraint_p = ggc_alloc_string (buf, c_len);
505 constraint = *constraint_p;
508 /* Loop through the constraint string. */
509 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
510 switch (*p)
512 case '+':
513 case '=':
514 error ("operand constraint contains incorrectly positioned '+' or '='");
515 return false;
517 case '%':
518 if (operand_num + 1 == ninputs + noutputs)
520 error ("`%%' constraint used with last operand");
521 return false;
523 break;
525 case 'V': case 'm': case 'o':
526 *allows_mem = true;
527 break;
529 case '?': case '!': case '*': case '&': case '#':
530 case 'E': case 'F': case 'G': case 'H':
531 case 's': case 'i': case 'n':
532 case 'I': case 'J': case 'K': case 'L': case 'M':
533 case 'N': case 'O': case 'P': case ',':
534 break;
536 case '0': case '1': case '2': case '3': case '4':
537 case '5': case '6': case '7': case '8': case '9':
538 case '[':
539 error ("matching constraint not valid in output operand");
540 return false;
542 case '<': case '>':
543 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
544 excepting those that expand_call created. So match memory
545 and hope. */
546 *allows_mem = true;
547 break;
549 case 'g': case 'X':
550 *allows_reg = true;
551 *allows_mem = true;
552 break;
554 case 'p': case 'r':
555 *allows_reg = true;
556 break;
558 default:
559 if (!ISALPHA (*p))
560 break;
561 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
562 *allows_reg = true;
563 #ifdef EXTRA_CONSTRAINT_STR
564 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
565 *allows_reg = true;
566 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
567 *allows_mem = true;
568 else
570 /* Otherwise we can't assume anything about the nature of
571 the constraint except that it isn't purely registers.
572 Treat it like "g" and hope for the best. */
573 *allows_reg = true;
574 *allows_mem = true;
576 #endif
577 break;
580 return true;
583 /* Similar, but for input constraints. */
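/* One case worth illustrating: a purely numeric input constraint such as
   "0" refers back to output operand 0, and when that number is the only
   alternative the code below substitutes the matched output constraint.
   In a hypothetical asm ("..." : "=r" (x) : "0" (y)), the input therefore
   ends up with *allows_reg set via the matched "=r".  */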
585 bool
586 parse_input_constraint (const char **constraint_p, int input_num,
587 int ninputs, int noutputs, int ninout,
588 const char * const * constraints,
589 bool *allows_mem, bool *allows_reg)
591 const char *constraint = *constraint_p;
592 const char *orig_constraint = constraint;
593 size_t c_len = strlen (constraint);
594 size_t j;
595 bool saw_match = false;
597 /* Assume the constraint doesn't allow the use of either
598 a register or memory. */
599 *allows_mem = false;
600 *allows_reg = false;
602 /* Make sure constraint has neither `=', `+', nor '&'. */
604 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
605 switch (constraint[j])
607 case '+': case '=': case '&':
608 if (constraint == orig_constraint)
610 error ("input operand constraint contains `%c'", constraint[j]);
611 return false;
613 break;
615 case '%':
616 if (constraint == orig_constraint
617 && input_num + 1 == ninputs - ninout)
619 error ("`%%' constraint used with last operand");
620 return false;
622 break;
624 case 'V': case 'm': case 'o':
625 *allows_mem = true;
626 break;
628 case '<': case '>':
629 case '?': case '!': case '*': case '#':
630 case 'E': case 'F': case 'G': case 'H':
631 case 's': case 'i': case 'n':
632 case 'I': case 'J': case 'K': case 'L': case 'M':
633 case 'N': case 'O': case 'P': case ',':
634 break;
636 /* Whether or not a numeric constraint allows a register is
637 decided by the matching constraint, and so there is no need
638 to do anything special with them. We must handle them in
639 the default case, so that we don't unnecessarily force
640 operands to memory. */
641 case '0': case '1': case '2': case '3': case '4':
642 case '5': case '6': case '7': case '8': case '9':
644 char *end;
645 unsigned long match;
647 saw_match = true;
649 match = strtoul (constraint + j, &end, 10);
650 if (match >= (unsigned long) noutputs)
652 error ("matching constraint references invalid operand number");
653 return false;
656 /* Try and find the real constraint for this dup. Only do this
657 if the matching constraint is the only alternative. */
658 if (*end == '\0'
659 && (j == 0 || (j == 1 && constraint[0] == '%')))
661 constraint = constraints[match];
662 *constraint_p = constraint;
663 c_len = strlen (constraint);
664 j = 0;
665 /* ??? At the end of the loop, we will skip the first part of
666 the matched constraint. This assumes not only that the
667 other constraint is an output constraint, but also that
668 the '=' or '+' come first. */
669 break;
671 else
672 j = end - constraint;
673 /* Anticipate increment at end of loop. */
674 j--;
676 /* Fall through. */
678 case 'p': case 'r':
679 *allows_reg = true;
680 break;
682 case 'g': case 'X':
683 *allows_reg = true;
684 *allows_mem = true;
685 break;
687 default:
688 if (! ISALPHA (constraint[j]))
690 error ("invalid punctuation `%c' in constraint", constraint[j]);
691 return false;
693 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
694 != NO_REGS)
695 *allows_reg = true;
696 #ifdef EXTRA_CONSTRAINT_STR
697 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
698 *allows_reg = true;
699 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
700 *allows_mem = true;
701 else
703 /* Otherwise we can't assume anything about the nature of
704 the constraint except that it isn't purely registers.
705 Treat it like "g" and hope for the best. */
706 *allows_reg = true;
707 *allows_mem = true;
709 #endif
710 break;
713 if (saw_match && !*allows_reg)
714 warning ("matching constraint does not allow a register");
716 return true;
719 /* INPUT is one of the input operands from EXPR, an ASM_EXPR. Returns true
720 if it is an operand which must be passed in memory (i.e. an "m"
721 constraint), false otherwise. */
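/* For a hypothetical asm ("..." : : "m" (x)) this returns true, since
   "m" allows only memory; for an "r" or "g" input it returns false.  */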
723 bool
724 asm_op_is_mem_input (tree input, tree expr)
726 const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (input)));
727 tree outputs = ASM_OUTPUTS (expr);
728 int noutputs = list_length (outputs);
729 const char **constraints
730 = (const char **) alloca ((noutputs) * sizeof (const char *));
731 int i = 0;
732 bool allows_mem, allows_reg;
733 tree t;
735 /* Collect output constraints. */
736 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
737 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
739 /* We pass 0 for input_num, ninputs and ninout; they are only used for
740 error checking which will be done at expand time. */
741 parse_input_constraint (&constraint, 0, 0, noutputs, 0, constraints,
742 &allows_mem, &allows_reg);
743 return (!allows_reg && allows_mem);
746 /* Check for overlap between registers marked in CLOBBERED_REGS and
747 anything inappropriate in DECL. Emit error and return TRUE for error,
748 FALSE for ok. */
750 static bool
751 decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
753 /* Conflicts between asm-declared register variables and the clobber
754 list are not allowed. */
755 if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
756 && DECL_REGISTER (decl)
757 && REG_P (DECL_RTL (decl))
758 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
760 rtx reg = DECL_RTL (decl);
761 unsigned int regno;
763 for (regno = REGNO (reg);
764 regno < (REGNO (reg)
765 + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
766 regno++)
767 if (TEST_HARD_REG_BIT (clobbered_regs, regno))
769 error ("asm-specifier for variable `%s' conflicts with asm clobber list",
770 IDENTIFIER_POINTER (DECL_NAME (decl)));
772 /* Reset registerness to stop multiple errors emitted for a
773 single variable. */
774 DECL_REGISTER (decl) = 0;
775 return true;
778 return false;
781 /* Generate RTL for an asm statement with arguments.
782 STRING is the instruction template.
783 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
  784    Each output or input has an expression in the TREE_VALUE
  785    and a tree list in TREE_PURPOSE which in turn contains a constraint
  786    name in TREE_PURPOSE (or NULL_TREE) and a constraint string
  787    in TREE_VALUE.
788 CLOBBERS is a list of STRING_CST nodes each naming a hard register
789 that is clobbered by this insn.
791 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
792 Some elements of OUTPUTS may be replaced with trees representing temporary
793 values. The caller should copy those temporary values to the originally
794 specified lvalues.
796 VOL nonzero means the insn is volatile; don't optimize it. */
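/* To make the argument lists concrete, for a hypothetical statement

       asm volatile ("mov %1, %0" : "=r" (dst) : "r" (src) : "cc");

   OUTPUTS is a one-element TREE_LIST whose TREE_VALUE is `dst' and whose
   TREE_PURPOSE is a tree list carrying the "=r" constraint string in its
   TREE_VALUE (the operand name slot being NULL_TREE); INPUTS describes
   `src' with "r" the same way; CLOBBERS holds the STRING_CST "cc"; and
   VOL is nonzero because of `volatile'.  */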
798 void
799 expand_asm_operands (tree string, tree outputs, tree inputs,
800 tree clobbers, int vol, location_t locus)
802 rtvec argvec, constraintvec;
803 rtx body;
804 int ninputs = list_length (inputs);
805 int noutputs = list_length (outputs);
806 int ninout;
807 int nclobbers;
808 HARD_REG_SET clobbered_regs;
809 int clobber_conflict_found = 0;
810 tree tail;
811 tree t;
812 int i;
813 /* Vector of RTX's of evaluated output operands. */
814 rtx *output_rtx = alloca (noutputs * sizeof (rtx));
815 int *inout_opnum = alloca (noutputs * sizeof (int));
816 rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
817 enum machine_mode *inout_mode
818 = alloca (noutputs * sizeof (enum machine_mode));
819 const char **constraints
820 = alloca ((noutputs + ninputs) * sizeof (const char *));
821 int old_generating_concat_p = generating_concat_p;
823 /* An ASM with no outputs needs to be treated as volatile, for now. */
824 if (noutputs == 0)
825 vol = 1;
827 if (! check_operand_nalternatives (outputs, inputs))
828 return;
830 string = resolve_asm_operand_names (string, outputs, inputs);
832 /* Collect constraints. */
833 i = 0;
834 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
835 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
836 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
837 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
839 /* Sometimes we wish to automatically clobber registers across an asm.
840 Case in point is when the i386 backend moved from cc0 to a hard reg --
841 maintaining source-level compatibility means automatically clobbering
842 the flags register. */
843 clobbers = targetm.md_asm_clobbers (clobbers);
845 /* Count the number of meaningful clobbered registers, ignoring what
846 we would ignore later. */
847 nclobbers = 0;
848 CLEAR_HARD_REG_SET (clobbered_regs);
849 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
851 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
853 i = decode_reg_name (regname);
854 if (i >= 0 || i == -4)
855 ++nclobbers;
856 else if (i == -2)
857 error ("unknown register name `%s' in `asm'", regname);
859 /* Mark clobbered registers. */
860 if (i >= 0)
862 /* Clobbering the PIC register is an error */
863 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
865 error ("PIC register `%s' clobbered in `asm'", regname);
866 return;
869 SET_HARD_REG_BIT (clobbered_regs, i);
873 /* First pass over inputs and outputs checks validity and sets
874 mark_addressable if needed. */
876 ninout = 0;
877 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
879 tree val = TREE_VALUE (tail);
880 tree type = TREE_TYPE (val);
881 const char *constraint;
882 bool is_inout;
883 bool allows_reg;
884 bool allows_mem;
886 /* If there's an erroneous arg, emit no insn. */
887 if (type == error_mark_node)
888 return;
890 /* Try to parse the output constraint. If that fails, there's
891 no point in going further. */
892 constraint = constraints[i];
893 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
894 &allows_mem, &allows_reg, &is_inout))
895 return;
897 if (! allows_reg
898 && (allows_mem
899 || is_inout
900 || (DECL_P (val)
901 && REG_P (DECL_RTL (val))
902 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
903 lang_hooks.mark_addressable (val);
905 if (is_inout)
906 ninout++;
909 ninputs += ninout;
910 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
912 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
913 return;
916 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
918 bool allows_reg, allows_mem;
919 const char *constraint;
921 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
922 would get VOIDmode and that could cause a crash in reload. */
923 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
924 return;
926 constraint = constraints[i + noutputs];
927 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
928 constraints, &allows_mem, &allows_reg))
929 return;
931 if (! allows_reg && allows_mem)
932 lang_hooks.mark_addressable (TREE_VALUE (tail));
935 /* Second pass evaluates arguments. */
937 ninout = 0;
938 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
940 tree val = TREE_VALUE (tail);
941 tree type = TREE_TYPE (val);
942 bool is_inout;
943 bool allows_reg;
944 bool allows_mem;
945 rtx op;
947 if (!parse_output_constraint (&constraints[i], i, ninputs,
948 noutputs, &allows_mem, &allows_reg,
949 &is_inout))
950 abort ();
952 /* If an output operand is not a decl or indirect ref and our constraint
953 allows a register, make a temporary to act as an intermediate.
954 Make the asm insn write into that, then our caller will copy it to
955 the real output operand. Likewise for promoted variables. */
957 generating_concat_p = 0;
959 real_output_rtx[i] = NULL_RTX;
960 if ((TREE_CODE (val) == INDIRECT_REF
961 && allows_mem)
962 || (DECL_P (val)
963 && (allows_mem || REG_P (DECL_RTL (val)))
964 && ! (REG_P (DECL_RTL (val))
965 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
966 || ! allows_reg
967 || is_inout)
969 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
970 if (MEM_P (op))
971 op = validize_mem (op);
973 if (! allows_reg && !MEM_P (op))
974 error ("output number %d not directly addressable", i);
975 if ((! allows_mem && MEM_P (op))
976 || GET_CODE (op) == CONCAT)
978 real_output_rtx[i] = op;
979 op = gen_reg_rtx (GET_MODE (op));
980 if (is_inout)
981 emit_move_insn (op, real_output_rtx[i]);
984 else
986 op = assign_temp (type, 0, 0, 1);
987 op = validize_mem (op);
988 TREE_VALUE (tail) = make_tree (type, op);
990 output_rtx[i] = op;
992 generating_concat_p = old_generating_concat_p;
994 if (is_inout)
996 inout_mode[ninout] = TYPE_MODE (type);
997 inout_opnum[ninout++] = i;
1000 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1001 clobber_conflict_found = 1;
1004 /* Make vectors for the expression-rtx, constraint strings,
1005 and named operands. */
1007 argvec = rtvec_alloc (ninputs);
1008 constraintvec = rtvec_alloc (ninputs);
1010 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1011 : GET_MODE (output_rtx[0])),
1012 TREE_STRING_POINTER (string),
1013 empty_string, 0, argvec, constraintvec,
1014 locus);
1016 MEM_VOLATILE_P (body) = vol;
1018 /* Eval the inputs and put them into ARGVEC.
1019 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1021 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1023 bool allows_reg, allows_mem;
1024 const char *constraint;
1025 tree val, type;
1026 rtx op;
1028 constraint = constraints[i + noutputs];
1029 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1030 constraints, &allows_mem, &allows_reg))
1031 abort ();
1033 generating_concat_p = 0;
1035 val = TREE_VALUE (tail);
1036 type = TREE_TYPE (val);
1037 op = expand_expr (val, NULL_RTX, VOIDmode,
1038 (allows_mem && !allows_reg
1039 ? EXPAND_MEMORY : EXPAND_NORMAL));
1041 /* Never pass a CONCAT to an ASM. */
1042 if (GET_CODE (op) == CONCAT)
1043 op = force_reg (GET_MODE (op), op);
1044 else if (MEM_P (op))
1045 op = validize_mem (op);
1047 if (asm_operand_ok (op, constraint) <= 0)
1049 if (allows_reg)
1050 op = force_reg (TYPE_MODE (type), op);
1051 else if (!allows_mem)
1052 warning ("asm operand %d probably doesn't match constraints",
1053 i + noutputs);
1054 else if (MEM_P (op))
1056 /* We won't recognize either volatile memory or memory
 1057              with a queued address as an available memory_operand
1058 at this point. Ignore it: clearly this *is* a memory. */
1060 else
1062 warning ("use of memory input without lvalue in "
1063 "asm operand %d is deprecated", i + noutputs);
1065 if (CONSTANT_P (op))
1067 rtx mem = force_const_mem (TYPE_MODE (type), op);
1068 if (mem)
1069 op = validize_mem (mem);
1070 else
1071 op = force_reg (TYPE_MODE (type), op);
1073 if (REG_P (op)
1074 || GET_CODE (op) == SUBREG
1075 || GET_CODE (op) == CONCAT)
1077 tree qual_type = build_qualified_type (type,
1078 (TYPE_QUALS (type)
1079 | TYPE_QUAL_CONST));
1080 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1081 memloc = validize_mem (memloc);
1082 emit_move_insn (memloc, op);
1083 op = memloc;
1088 generating_concat_p = old_generating_concat_p;
1089 ASM_OPERANDS_INPUT (body, i) = op;
1091 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1092 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1094 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1095 clobber_conflict_found = 1;
1098 /* Protect all the operands from the queue now that they have all been
1099 evaluated. */
1101 generating_concat_p = 0;
1103 /* For in-out operands, copy output rtx to input rtx. */
1104 for (i = 0; i < ninout; i++)
1106 int j = inout_opnum[i];
1107 char buffer[16];
1109 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1110 = output_rtx[j];
1112 sprintf (buffer, "%d", j);
1113 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1114 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
1117 generating_concat_p = old_generating_concat_p;
1119 /* Now, for each output, construct an rtx
1120 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1121 ARGVEC CONSTRAINTS OPNAMES))
1122 If there is more than one, put them inside a PARALLEL. */
1124 if (noutputs == 1 && nclobbers == 0)
1126 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1127 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1130 else if (noutputs == 0 && nclobbers == 0)
1132 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1133 emit_insn (body);
1136 else
1138 rtx obody = body;
1139 int num = noutputs;
1141 if (num == 0)
1142 num = 1;
1144 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1146 /* For each output operand, store a SET. */
1147 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1149 XVECEXP (body, 0, i)
1150 = gen_rtx_SET (VOIDmode,
1151 output_rtx[i],
1152 gen_rtx_ASM_OPERANDS
1153 (GET_MODE (output_rtx[i]),
1154 TREE_STRING_POINTER (string),
1155 constraints[i], i, argvec, constraintvec,
1156 locus));
1158 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1161 /* If there are no outputs (but there are some clobbers)
1162 store the bare ASM_OPERANDS into the PARALLEL. */
1164 if (i == 0)
1165 XVECEXP (body, 0, i++) = obody;
1167 /* Store (clobber REG) for each clobbered register specified. */
1169 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1171 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1172 int j = decode_reg_name (regname);
1173 rtx clobbered_reg;
1175 if (j < 0)
1177 if (j == -3) /* `cc', which is not a register */
1178 continue;
1180 if (j == -4) /* `memory', don't cache memory across asm */
1182 XVECEXP (body, 0, i++)
1183 = gen_rtx_CLOBBER (VOIDmode,
1184 gen_rtx_MEM
1185 (BLKmode,
1186 gen_rtx_SCRATCH (VOIDmode)));
1187 continue;
1190 /* Ignore unknown register, error already signaled. */
1191 continue;
1194 /* Use QImode since that's guaranteed to clobber just one reg. */
1195 clobbered_reg = gen_rtx_REG (QImode, j);
 1197          /* Sanity-check for any overlap between the clobbers and the inputs
 1198             or outputs that has not been handled yet.  Such overlap
1199 should have been detected and reported above. */
1200 if (!clobber_conflict_found)
1202 int opno;
1204 /* We test the old body (obody) contents to avoid tripping
1205 over the under-construction body. */
1206 for (opno = 0; opno < noutputs; opno++)
1207 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1208 internal_error ("asm clobber conflict with output operand");
1210 for (opno = 0; opno < ninputs - ninout; opno++)
1211 if (reg_overlap_mentioned_p (clobbered_reg,
1212 ASM_OPERANDS_INPUT (obody, opno)))
1213 internal_error ("asm clobber conflict with input operand");
1216 XVECEXP (body, 0, i++)
1217 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1220 emit_insn (body);
1223 /* For any outputs that needed reloading into registers, spill them
1224 back to where they belong. */
1225 for (i = 0; i < noutputs; ++i)
1226 if (real_output_rtx[i])
1227 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1229 free_temp_slots ();
1232 void
1233 expand_asm_expr (tree exp)
1235 int noutputs, i;
1236 tree outputs, tail;
1237 tree *o;
1239 if (ASM_INPUT_P (exp))
1241 expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
1242 return;
1245 outputs = ASM_OUTPUTS (exp);
1246 noutputs = list_length (outputs);
1247 /* o[I] is the place that output number I should be written. */
1248 o = (tree *) alloca (noutputs * sizeof (tree));
1250 /* Record the contents of OUTPUTS before it is modified. */
1251 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1252 o[i] = TREE_VALUE (tail);
1254 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1255 OUTPUTS some trees for where the values were actually stored. */
1256 expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1257 ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1258 input_location);
1260 /* Copy all the intermediate outputs into the specified outputs. */
1261 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1263 if (o[i] != TREE_VALUE (tail))
1265 expand_assignment (o[i], TREE_VALUE (tail), 0);
1266 free_temp_slots ();
1268 /* Restore the original value so that it's correct the next
1269 time we expand this function. */
1270 TREE_VALUE (tail) = o[i];
1275 /* A subroutine of expand_asm_operands. Check that all operands have
1276 the same number of alternatives. Return true if so. */
1278 static bool
1279 check_operand_nalternatives (tree outputs, tree inputs)
1281 if (outputs || inputs)
1283 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1284 int nalternatives
1285 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1286 tree next = inputs;
1288 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1290 error ("too many alternatives in `asm'");
1291 return false;
1294 tmp = outputs;
1295 while (tmp)
1297 const char *constraint
1298 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1300 if (n_occurrences (',', constraint) != nalternatives)
1302 error ("operand constraints for `asm' differ in number of alternatives");
1303 return false;
1306 if (TREE_CHAIN (tmp))
1307 tmp = TREE_CHAIN (tmp);
1308 else
1309 tmp = next, next = 0;
1313 return true;
1316 /* A subroutine of expand_asm_operands. Check that all operand names
1317 are unique. Return true if so. We rely on the fact that these names
1318 are identifiers, and so have been canonicalized by get_identifier,
1319 so all we need are pointer comparisons. */
1321 static bool
1322 check_unique_operand_names (tree outputs, tree inputs)
1324 tree i, j;
1326 for (i = outputs; i ; i = TREE_CHAIN (i))
1328 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1329 if (! i_name)
1330 continue;
1332 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1333 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1334 goto failure;
1337 for (i = inputs; i ; i = TREE_CHAIN (i))
1339 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1340 if (! i_name)
1341 continue;
1343 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1344 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1345 goto failure;
1346 for (j = outputs; j ; j = TREE_CHAIN (j))
1347 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1348 goto failure;
1351 return true;
1353 failure:
1354 error ("duplicate asm operand name '%s'",
1355 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1356 return false;
1359 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1360 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1361 STRING and in the constraints to those numbers. */
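/* For instance, given the hypothetical statement

       asm ("add %[inc], %[val]" : [val] "+r" (v) : [inc] "r" (i));

   the template is rewritten as "add %1, %0", and any [val] or [inc]
   references in the input constraint strings are likewise replaced by
   the operand numbers 0 and 1.  */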
1363 tree
1364 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1366 char *buffer;
1367 char *p;
1368 const char *c;
1369 tree t;
1371 check_unique_operand_names (outputs, inputs);
1373 /* Substitute [<name>] in input constraint strings. There should be no
1374 named operands in output constraints. */
1375 for (t = inputs; t ; t = TREE_CHAIN (t))
1377 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1378 if (strchr (c, '[') != NULL)
1380 p = buffer = xstrdup (c);
1381 while ((p = strchr (p, '[')) != NULL)
1382 p = resolve_operand_name_1 (p, outputs, inputs);
1383 TREE_VALUE (TREE_PURPOSE (t))
1384 = build_string (strlen (buffer), buffer);
1385 free (buffer);
1389 /* Now check for any needed substitutions in the template. */
1390 c = TREE_STRING_POINTER (string);
1391 while ((c = strchr (c, '%')) != NULL)
1393 if (c[1] == '[')
1394 break;
1395 else if (ISALPHA (c[1]) && c[2] == '[')
1396 break;
1397 else
1399 c += 1;
1400 continue;
1404 if (c)
1406 /* OK, we need to make a copy so we can perform the substitutions.
1407 Assume that we will not need extra space--we get to remove '['
1408 and ']', which means we cannot have a problem until we have more
1409 than 999 operands. */
1410 buffer = xstrdup (TREE_STRING_POINTER (string));
1411 p = buffer + (c - TREE_STRING_POINTER (string));
1413 while ((p = strchr (p, '%')) != NULL)
1415 if (p[1] == '[')
1416 p += 1;
1417 else if (ISALPHA (p[1]) && p[2] == '[')
1418 p += 2;
1419 else
1421 p += 1;
1422 continue;
1425 p = resolve_operand_name_1 (p, outputs, inputs);
1428 string = build_string (strlen (buffer), buffer);
1429 free (buffer);
1432 return string;
1435 /* A subroutine of resolve_operand_names. P points to the '[' for a
1436 potential named operand of the form [<name>]. In place, replace
1437 the name and brackets with a number. Return a pointer to the
1438 balance of the string after substitution. */
1440 static char *
1441 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
1443 char *q;
1444 int op;
1445 tree t;
1446 size_t len;
1448 /* Collect the operand name. */
1449 q = strchr (p, ']');
1450 if (!q)
1452 error ("missing close brace for named operand");
1453 return strchr (p, '\0');
1455 len = q - p - 1;
1457 /* Resolve the name to a number. */
1458 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1460 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1461 if (name)
1463 const char *c = TREE_STRING_POINTER (name);
1464 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1465 goto found;
1468 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1470 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1471 if (name)
1473 const char *c = TREE_STRING_POINTER (name);
1474 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1475 goto found;
1479 *q = '\0';
1480 error ("undefined named operand '%s'", p + 1);
1481 op = 0;
1482 found:
1484 /* Replace the name with the number. Unfortunately, not all libraries
1485 get the return value of sprintf correct, so search for the end of the
1486 generated string by hand. */
1487 sprintf (p, "%d", op);
1488 p = strchr (p, '\0');
1490 /* Verify the no extra buffer space assumption. */
1491 if (p > q)
1492 abort ();
1494 /* Shift the rest of the buffer down to fill the gap. */
1495 memmove (p, q + 1, strlen (q + 1) + 1);
1497 return p;
1500 /* Generate RTL to evaluate the expression EXP. */
1502 void
1503 expand_expr_stmt (tree exp)
1505 rtx value;
1506 tree type;
1508 value = expand_expr (exp, const0_rtx, VOIDmode, 0);
1509 type = TREE_TYPE (exp);
1511 /* If all we do is reference a volatile value in memory,
1512 copy it to a register to be sure it is actually touched. */
1513 if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
1515 if (TYPE_MODE (type) == VOIDmode)
1517 else if (TYPE_MODE (type) != BLKmode)
1518 value = copy_to_reg (value);
1519 else
1521 rtx lab = gen_label_rtx ();
1523 /* Compare the value with itself to reference it. */
1524 emit_cmp_and_jump_insns (value, value, EQ,
1525 expand_expr (TYPE_SIZE (type),
1526 NULL_RTX, VOIDmode, 0),
1527 BLKmode, 0, lab);
1528 emit_label (lab);
1532 /* Free any temporaries used to evaluate this expression. */
1533 free_temp_slots ();
1536 /* Warn if EXP contains any computations whose results are not used.
1537 Return 1 if a warning is printed; 0 otherwise. LOCUS is the
1538 (potential) location of the expression. */
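/* For example (hypothetical C statements), `x == 1;' triggers the
   warning, while `(void) f ();', `x = 1;' and `f ();' do not: they are
   a cast to void, an assignment and a call, all handled by the cases
   below.  */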
1541 warn_if_unused_value (tree exp, location_t locus)
1543 restart:
1544 if (TREE_USED (exp))
1545 return 0;
1547 /* Don't warn about void constructs. This includes casting to void,
1548 void function calls, and statement expressions with a final cast
1549 to void. */
1550 if (VOID_TYPE_P (TREE_TYPE (exp)))
1551 return 0;
1553 if (EXPR_HAS_LOCATION (exp))
1554 locus = EXPR_LOCATION (exp);
1556 switch (TREE_CODE (exp))
1558 case PREINCREMENT_EXPR:
1559 case POSTINCREMENT_EXPR:
1560 case PREDECREMENT_EXPR:
1561 case POSTDECREMENT_EXPR:
1562 case MODIFY_EXPR:
1563 case INIT_EXPR:
1564 case TARGET_EXPR:
1565 case CALL_EXPR:
1566 case TRY_CATCH_EXPR:
1567 case WITH_CLEANUP_EXPR:
1568 case EXIT_EXPR:
1569 return 0;
1571 case BIND_EXPR:
1572 /* For a binding, warn if no side effect within it. */
1573 exp = BIND_EXPR_BODY (exp);
1574 goto restart;
1576 case SAVE_EXPR:
1577 exp = TREE_OPERAND (exp, 0);
1578 goto restart;
1580 case TRUTH_ORIF_EXPR:
1581 case TRUTH_ANDIF_EXPR:
1582 /* In && or ||, warn if 2nd operand has no side effect. */
1583 exp = TREE_OPERAND (exp, 1);
1584 goto restart;
1586 case COMPOUND_EXPR:
1587 if (TREE_NO_WARNING (exp))
1588 return 0;
1589 if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
1590 return 1;
1591 /* Let people do `(foo (), 0)' without a warning. */
1592 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1593 return 0;
1594 exp = TREE_OPERAND (exp, 1);
1595 goto restart;
1597 case NOP_EXPR:
1598 case CONVERT_EXPR:
1599 case NON_LVALUE_EXPR:
1600 /* Don't warn about conversions not explicit in the user's program. */
1601 if (TREE_NO_WARNING (exp))
1602 return 0;
1603 /* Assignment to a cast usually results in a cast of a modify.
1604 Don't complain about that. There can be an arbitrary number of
1605 casts before the modify, so we must loop until we find the first
1606 non-cast expression and then test to see if that is a modify. */
1608 tree tem = TREE_OPERAND (exp, 0);
1610 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1611 tem = TREE_OPERAND (tem, 0);
1613 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1614 || TREE_CODE (tem) == CALL_EXPR)
1615 return 0;
1617 goto maybe_warn;
1619 case INDIRECT_REF:
1620 /* Don't warn about automatic dereferencing of references, since
1621 the user cannot control it. */
1622 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1624 exp = TREE_OPERAND (exp, 0);
1625 goto restart;
1627 /* Fall through. */
1629 default:
1630 /* Referencing a volatile value is a side effect, so don't warn. */
1631 if ((DECL_P (exp)
1632 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1633 && TREE_THIS_VOLATILE (exp))
1634 return 0;
1636 /* If this is an expression which has no operands, there is no value
1637 to be unused. There are no such language-independent codes,
1638 but front ends may define such. */
1639 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
1640 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
1641 return 0;
1643 maybe_warn:
1644 /* If this is an expression with side effects, don't warn. */
1645 if (TREE_SIDE_EFFECTS (exp))
1646 return 0;
1648 warning ("%Hvalue computed is not used", &locus);
1649 return 1;
1653 /* Generate RTL for the start of an if-then. COND is the expression
1654 whose truth should be tested.
1656 If EXITFLAG is nonzero, this conditional is visible to
1657 `exit_something'. */
1659 void
1660 expand_start_cond (tree cond, int exitflag)
1662 struct nesting *thiscond = ALLOC_NESTING ();
1664 /* Make an entry on cond_stack for the cond we are entering. */
1666 thiscond->desc = COND_NESTING;
1667 thiscond->next = cond_stack;
1668 thiscond->all = nesting_stack;
1669 thiscond->depth = ++nesting_depth;
1670 thiscond->data.cond.next_label = gen_label_rtx ();
1671 /* Before we encounter an `else', we don't need a separate exit label
1672 unless there are supposed to be exit statements
1673 to exit this conditional. */
1674 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1675 thiscond->data.cond.endif_label = thiscond->exit_label;
1676 cond_stack = thiscond;
1677 nesting_stack = thiscond;
1679 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1682 /* Generate RTL between then-clause and the elseif-clause
1683 of an if-then-elseif-.... */
1685 void
1686 expand_start_elseif (tree cond)
1688 if (cond_stack->data.cond.endif_label == 0)
1689 cond_stack->data.cond.endif_label = gen_label_rtx ();
1690 emit_jump (cond_stack->data.cond.endif_label);
1691 emit_label (cond_stack->data.cond.next_label);
1692 cond_stack->data.cond.next_label = gen_label_rtx ();
1693 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1696 /* Generate RTL between the then-clause and the else-clause
1697 of an if-then-else. */
1699 void
1700 expand_start_else (void)
1702 if (cond_stack->data.cond.endif_label == 0)
1703 cond_stack->data.cond.endif_label = gen_label_rtx ();
1705 emit_jump (cond_stack->data.cond.endif_label);
1706 emit_label (cond_stack->data.cond.next_label);
1707 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1710 /* After calling expand_start_else, turn this "else" into an "else if"
1711 by providing another condition. */
1713 void
1714 expand_elseif (tree cond)
1716 cond_stack->data.cond.next_label = gen_label_rtx ();
1717 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1720 /* Generate RTL for the end of an if-then.
1721 Pop the record for it off of cond_stack. */
1723 void
1724 expand_end_cond (void)
1726 struct nesting *thiscond = cond_stack;
1728 do_pending_stack_adjust ();
1729 if (thiscond->data.cond.next_label)
1730 emit_label (thiscond->data.cond.next_label);
1731 if (thiscond->data.cond.endif_label)
1732 emit_label (thiscond->data.cond.endif_label);
1734 POPSTACK (cond_stack);
1737 /* Return nonzero if we should preserve sub-expressions as separate
1738 pseudos. We never do so if we aren't optimizing. We always do so
1739 if -fexpensive-optimizations. */
1742 preserve_subexpressions_p (void)
1744 if (flag_expensive_optimizations)
1745 return 1;
1747 if (optimize == 0 || cfun == 0 || cfun->stmt == 0)
1748 return 0;
1750 return 1;
1754 /* Generate RTL to return from the current function, with no value.
1755 (That is, we do not do anything about returning any value.) */
1757 void
1758 expand_null_return (void)
1760 /* If this function was declared to return a value, but we
1761 didn't, clobber the return registers so that they are not
1762 propagated live to the rest of the function. */
1763 clobber_return_register ();
1765 expand_null_return_1 ();
1768 /* Generate RTL to return directly from the current function.
1769 (That is, we bypass any return value.) */
1771 void
1772 expand_naked_return (void)
1774 rtx end_label;
1776 clear_pending_stack_adjust ();
1777 do_pending_stack_adjust ();
1779 end_label = naked_return_label;
1780 if (end_label == 0)
1781 end_label = naked_return_label = gen_label_rtx ();
1783 emit_jump (end_label);
1786 /* If the current function returns values in the most significant part
1787 of a register, shift return value VAL appropriately. The mode of
1788 the function's return type is known not to be BLKmode. */
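/* For example, on a hypothetical target that returns values in the most
   significant part of a 32-bit register, a 3-byte return type gives
   shift = 32 - 24 = 8, so VAL is shifted left by 8 bits before reaching
   the return register.  */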
1790 static rtx
1791 shift_return_value (rtx val)
1793 tree type;
1795 type = TREE_TYPE (DECL_RESULT (current_function_decl));
1796 if (targetm.calls.return_in_msb (type))
1798 rtx target;
1799 HOST_WIDE_INT shift;
1801 target = DECL_RTL (DECL_RESULT (current_function_decl));
1802 shift = (GET_MODE_BITSIZE (GET_MODE (target))
1803 - BITS_PER_UNIT * int_size_in_bytes (type));
1804 if (shift > 0)
1805 val = expand_shift (LSHIFT_EXPR, GET_MODE (target),
1806 gen_lowpart (GET_MODE (target), val),
1807 build_int_2 (shift, 0), target, 1);
1809 return val;
1813 /* Generate RTL to return from the current function, with value VAL. */
1815 static void
1816 expand_value_return (rtx val)
1818 /* Copy the value to the return location
1819 unless it's already there. */
1821 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
1822 if (return_reg != val)
1824 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
1825 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
1827 int unsignedp = TYPE_UNSIGNED (type);
1828 enum machine_mode old_mode
1829 = DECL_MODE (DECL_RESULT (current_function_decl));
1830 enum machine_mode mode
1831 = promote_mode (type, old_mode, &unsignedp, 1);
1833 if (mode != old_mode)
1834 val = convert_modes (mode, old_mode, val, unsignedp);
1836 if (GET_CODE (return_reg) == PARALLEL)
1837 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1838 else
1839 emit_move_insn (return_reg, val);
1842 expand_null_return_1 ();
1845 /* Output a return with no value. */
1847 static void
1848 expand_null_return_1 (void)
1850 rtx end_label;
1852 clear_pending_stack_adjust ();
1853 do_pending_stack_adjust ();
1855 end_label = return_label;
1856 if (end_label == 0)
1857 end_label = return_label = gen_label_rtx ();
1858 emit_jump (end_label);
1861 /* Generate RTL to evaluate the expression RETVAL and return it
1862 from the current function. */
1864 void
1865 expand_return (tree retval)
1867 rtx result_rtl;
1868 rtx val = 0;
1869 tree retval_rhs;
1871 /* If function wants no value, give it none. */
1872 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
1874 expand_expr (retval, NULL_RTX, VOIDmode, 0);
1875 expand_null_return ();
1876 return;
1879 if (retval == error_mark_node)
1881 /* Treat this like a return of no value from a function that
1882 returns a value. */
1883 expand_null_return ();
1884 return;
1886 else if (TREE_CODE (retval) == RESULT_DECL)
1887 retval_rhs = retval;
1888 else if ((TREE_CODE (retval) == MODIFY_EXPR
1889 || TREE_CODE (retval) == INIT_EXPR)
1890 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
1891 retval_rhs = TREE_OPERAND (retval, 1);
1892 else
1893 retval_rhs = retval;
1895 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
1897 /* If the result is an aggregate that is being returned in one (or more)
1898 registers, load the registers here. The compiler currently can't handle
1899 copying a BLKmode value into registers. We could put this code in a
1900 more general area (for use by everyone instead of just function
1901 call/return), but until this feature is generally usable it is kept here
1902 (and in expand_call). */
1904 if (retval_rhs != 0
1905 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
1906 && REG_P (result_rtl))
1908 int i;
1909 unsigned HOST_WIDE_INT bitpos, xbitpos;
1910 unsigned HOST_WIDE_INT padding_correction = 0;
1911 unsigned HOST_WIDE_INT bytes
1912 = int_size_in_bytes (TREE_TYPE (retval_rhs));
1913 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1914 unsigned int bitsize
1915 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
1916 rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
1917 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
1918 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
1919 enum machine_mode tmpmode, result_reg_mode;
1921 if (bytes == 0)
1923 expand_null_return ();
1924 return;
1927 /* If the structure doesn't take up a whole number of words, see
1928 whether the register value should be padded on the left or on
1929 the right. Set PADDING_CORRECTION to the number of padding
1930 bits needed on the left side.
 1932          In most ABIs, the structure will be returned at the least significant end of
1933 the register, which translates to right padding on little-endian
1934 targets and left padding on big-endian targets. The opposite
1935 holds if the structure is returned at the most significant
1936 end of the register. */
1937 if (bytes % UNITS_PER_WORD != 0
1938 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
1939 ? !BYTES_BIG_ENDIAN
1940 : BYTES_BIG_ENDIAN))
1941 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
1942 * BITS_PER_UNIT));
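/* Worked example with illustrative numbers (not tied to any particular
   target): with 4-byte words (BITS_PER_WORD == 32) and a 6-byte
   structure, bytes % UNITS_PER_WORD is 2, so when left padding is
   called for we get
     padding_correction = 32 - 2 * 8 = 16
   padding bits, and the copy loop below starts XBITPOS at 16 while
   BITPOS still starts at 0.  */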
1944 /* Copy the structure BITSIZE bits at a time. */
1945 for (bitpos = 0, xbitpos = padding_correction;
1946 bitpos < bytes * BITS_PER_UNIT;
1947 bitpos += bitsize, xbitpos += bitsize)
1949 /* We need a new destination pseudo each time xbitpos is
1950 on a word boundary and when xbitpos == padding_correction
1951 (the first time through). */
1952 if (xbitpos % BITS_PER_WORD == 0
1953 || xbitpos == padding_correction)
1955 /* Generate an appropriate register. */
1956 dst = gen_reg_rtx (word_mode);
1957 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
1959 /* Clear the destination before we move anything into it. */
1960 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
1963 /* We need a new source operand each time bitpos is on a word
1964 boundary. */
1965 if (bitpos % BITS_PER_WORD == 0)
1966 src = operand_subword_force (result_val,
1967 bitpos / BITS_PER_WORD,
1968 BLKmode);
1970 /* Use bitpos for the source extraction (left justified) and
1971 xbitpos for the destination store (right justified). */
1972 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
1973 extract_bit_field (src, bitsize,
1974 bitpos % BITS_PER_WORD, 1,
1975 NULL_RTX, word_mode, word_mode));
1978 tmpmode = GET_MODE (result_rtl);
1979 if (tmpmode == BLKmode)
1981 /* Find the smallest integer mode large enough to hold the
1982 entire structure and use that mode instead of BLKmode
1983 on the USE insn for the return register. */
1984 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1985 tmpmode != VOIDmode;
1986 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1987 /* Have we found a large enough mode? */
1988 if (GET_MODE_SIZE (tmpmode) >= bytes)
1989 break;
1991 /* No suitable mode found. */
1992 if (tmpmode == VOIDmode)
1993 abort ();
1995 PUT_MODE (result_rtl, tmpmode);
1998 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
1999 result_reg_mode = word_mode;
2000 else
2001 result_reg_mode = tmpmode;
2002 result_reg = gen_reg_rtx (result_reg_mode);
2004 for (i = 0; i < n_regs; i++)
2005 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2006 result_pseudos[i]);
2008 if (tmpmode != result_reg_mode)
2009 result_reg = gen_lowpart (tmpmode, result_reg);
2011 expand_value_return (result_reg);
2013 else if (retval_rhs != 0
2014 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
2015 && (REG_P (result_rtl)
2016 || (GET_CODE (result_rtl) == PARALLEL)))
2018 /* Calculate the return value into a temporary (usually a pseudo
2019 reg). */
2020 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
2021 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
2023 val = assign_temp (nt, 0, 0, 1);
2024 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2025 val = force_not_mem (val);
2026 /* Return the calculated value. */
2027 expand_value_return (shift_return_value (val));
2029 else
2031 /* No hard reg used; calculate value into hard return reg. */
2032 expand_expr (retval, const0_rtx, VOIDmode, 0);
2033 expand_value_return (result_rtl);
2037 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
2038 in question represents the outermost pair of curly braces (i.e. the "body
2039 block") of a function or method.
2041 For any BLOCK node representing a "body block" of a function or method, the
2042 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
2043 represents the outermost (function) scope for the function or method (i.e.
2044 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
2045 *that* node in turn will point to the relevant FUNCTION_DECL node. */
2048 is_body_block (tree stmt)
2050 if (lang_hooks.no_body_blocks)
2051 return 0;
2053 if (TREE_CODE (stmt) == BLOCK)
2055 tree parent = BLOCK_SUPERCONTEXT (stmt);
2057 if (parent && TREE_CODE (parent) == BLOCK)
2059 tree grandparent = BLOCK_SUPERCONTEXT (parent);
2061 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
2062 return 1;
2066 return 0;
2069 /* Emit code to restore vital registers at the beginning of a nonlocal goto
2070 handler. */
2071 static void
2072 expand_nl_goto_receiver (void)
2074 /* Clobber the FP when we get here, so we have to make sure it's
2075 marked as used by this function. */
2076 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
2078 /* Mark the static chain as clobbered here so life information
2079 doesn't get messed up for it. */
2080 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
2082 #ifdef HAVE_nonlocal_goto
2083 if (! HAVE_nonlocal_goto)
2084 #endif
2085 /* First adjust our frame pointer to its actual value. It was
2086 previously set to the start of the virtual area corresponding to
2087 the stacked variables when we branched here and now needs to be
2088 adjusted to the actual hardware fp value.
2090 Assignments to virtual registers are converted by
2091 instantiate_virtual_regs into the corresponding assignment
2092 to the underlying register (fp in this case) that makes
2093 the original assignment true.
2094 So the following insn will actually be
2095 decrementing fp by STARTING_FRAME_OFFSET. */
2096 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
2098 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2099 if (fixed_regs[ARG_POINTER_REGNUM])
2101 #ifdef ELIMINABLE_REGS
2102 /* If the argument pointer can be eliminated in favor of the
2103 frame pointer, we don't need to restore it. We assume here
2104 that if such an elimination is present, it can always be used.
2105 This is the case on all known machines; if we don't make this
2106 assumption, we do unnecessary saving on many machines. */
2107 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
2108 size_t i;
2110 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
2111 if (elim_regs[i].from == ARG_POINTER_REGNUM
2112 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
2113 break;
2115 if (i == ARRAY_SIZE (elim_regs))
2116 #endif
2118 /* Now restore our arg pointer from the address at which it
2119 was saved in our stack frame. */
2120 emit_move_insn (virtual_incoming_args_rtx,
2121 copy_to_reg (get_arg_pointer_save_area (cfun)));
2124 #endif
2126 #ifdef HAVE_nonlocal_goto_receiver
2127 if (HAVE_nonlocal_goto_receiver)
2128 emit_insn (gen_nonlocal_goto_receiver ());
2129 #endif
2131 /* @@@ This is a kludge. Not all machine descriptions define a blockage
2132 insn, but we must not allow the code we just generated to be reordered
2133 by scheduling. Specifically, the update of the frame pointer must
2134 happen immediately, not later. So emit an ASM_INPUT to act as blockage
2135 insn. */
2136 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
2139 /* Generate RTL for the automatic variable declaration DECL.
2140 (Other kinds of declarations are simply ignored if seen here.) */
2142 void
2143 expand_decl (tree decl)
2145 tree type;
2147 type = TREE_TYPE (decl);
2149 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
2150 type in case this node is used in a reference. */
2151 if (TREE_CODE (decl) == CONST_DECL)
2153 DECL_MODE (decl) = TYPE_MODE (type);
2154 DECL_ALIGN (decl) = TYPE_ALIGN (type);
2155 DECL_SIZE (decl) = TYPE_SIZE (type);
2156 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
2157 return;
2160 /* Otherwise, only automatic variables need any expansion done. Static and
2161 external variables, and external functions, will be handled by
2162 `assemble_variable' (called from finish_decl). TYPE_DECL requires
2163 nothing. PARM_DECLs are handled in `assign_parms'. */
2164 if (TREE_CODE (decl) != VAR_DECL)
2165 return;
2167 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
2168 return;
2170 /* Create the RTL representation for the variable. */
2172 if (type == error_mark_node)
2173 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
2175 else if (DECL_SIZE (decl) == 0)
2176 /* Variable with incomplete type. */
2178 rtx x;
2179 if (DECL_INITIAL (decl) == 0)
2180 /* Error message was already done; now avoid a crash. */
2181 x = gen_rtx_MEM (BLKmode, const0_rtx);
2182 else
2183 /* An initializer is going to decide the size of this array.
2184 Until we know the size, represent its address with a reg. */
2185 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
2187 set_mem_attributes (x, decl, 1);
2188 SET_DECL_RTL (decl, x);
2190 else if (use_register_for_decl (decl))
2192 /* Automatic variable that can go in a register. */
2193 int unsignedp = TYPE_UNSIGNED (type);
2194 enum machine_mode reg_mode
2195 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
2197 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
2199 /* Note if the object is a user variable. */
2200 if (!DECL_ARTIFICIAL (decl))
2202 mark_user_reg (DECL_RTL (decl));
2204 /* Trust user variables which have a pointer type to really
2205 be pointers. Do not trust compiler-generated temporaries;
2206 our type system is totally busted as it relates to
2207 pointer arithmetic, which translates into lots of
2208 compiler-generated objects with pointer types that are
2209 not really pointers. */
2210 if (POINTER_TYPE_P (type))
2211 mark_reg_pointer (DECL_RTL (decl),
2212 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
2215 maybe_set_unchanging (DECL_RTL (decl), decl);
2218 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
2219 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
2220 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
2221 STACK_CHECK_MAX_VAR_SIZE)))
2223 /* Variable of fixed size that goes on the stack. */
2224 rtx oldaddr = 0;
2225 rtx addr;
2226 rtx x;
2228 /* If we previously made RTL for this decl, it must be an array
2229 whose size was determined by the initializer.
2230 The old address was a register; set that register now
2231 to the proper address. */
2232 if (DECL_RTL_SET_P (decl))
2234 if (!MEM_P (DECL_RTL (decl))
2235 || !REG_P (XEXP (DECL_RTL (decl), 0)))
2236 abort ();
2237 oldaddr = XEXP (DECL_RTL (decl), 0);
2240 /* Set alignment we actually gave this decl. */
2241 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
2242 : GET_MODE_BITSIZE (DECL_MODE (decl)));
2243 DECL_USER_ALIGN (decl) = 0;
2245 x = assign_temp (decl, 1, 1, 1);
2246 set_mem_attributes (x, decl, 1);
2247 SET_DECL_RTL (decl, x);
2249 if (oldaddr)
2251 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
2252 if (addr != oldaddr)
2253 emit_move_insn (oldaddr, addr);
2256 else
2257 /* Dynamic-size object: must push space on the stack. */
2259 rtx address, size, x;
2261 /* Record the stack pointer on entry to block, if we have
2262 not already done so. */
2263 do_pending_stack_adjust ();
2265 /* Compute the variable's size, in bytes. This will expand any
2266 needed SAVE_EXPRs for the first time. */
2267 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
2268 free_temp_slots ();
2270 /* Allocate space on the stack for the variable. Note that
2271 DECL_ALIGN says how the variable is to be aligned and we
2272 cannot use it to conclude anything about the alignment of
2273 the size. */
2274 address = allocate_dynamic_stack_space (size, NULL_RTX,
2275 TYPE_ALIGN (TREE_TYPE (decl)));
2277 /* Reference the variable indirectly through that rtx. */
2278 x = gen_rtx_MEM (DECL_MODE (decl), address);
2279 set_mem_attributes (x, decl, 1);
2280 SET_DECL_RTL (decl, x);
2283 /* Indicate the alignment we actually gave this variable. */
2284 #ifdef STACK_BOUNDARY
2285 DECL_ALIGN (decl) = STACK_BOUNDARY;
2286 #else
2287 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2288 #endif
2289 DECL_USER_ALIGN (decl) = 0;
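/* Illustrative example (source code, not part of this file) of which
   branch above handles which kind of local in a function such as

     void f (int n)
     {
       int i;          -- typically a pseudo register (use_register_for_decl)
       char buf[64];   -- fixed-size stack slot via assign_temp
       char vla[n];    -- dynamic size: allocate_dynamic_stack_space
     }

   The exact choice depends on the front end, optimization level and
   target, so this is only a sketch.  */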
2293 /* Emit code to allocate T_SIZE bytes of dynamic stack space for ALLOC. */
2294 void
2295 expand_stack_alloc (tree alloc, tree t_size)
2297 rtx address, dest, size;
2298 tree var, type;
2300 if (TREE_CODE (alloc) != ADDR_EXPR)
2301 abort ();
2302 var = TREE_OPERAND (alloc, 0);
2303 if (TREE_CODE (var) != VAR_DECL)
2304 abort ();
2306 type = TREE_TYPE (var);
2308 /* Compute the variable's size, in bytes. */
2309 size = expand_expr (t_size, NULL_RTX, VOIDmode, 0);
2310 free_temp_slots ();
2312 /* Allocate space on the stack for the variable. */
2313 address = XEXP (DECL_RTL (var), 0);
2314 dest = allocate_dynamic_stack_space (size, address, TYPE_ALIGN (type));
2315 if (dest != address)
2316 emit_move_insn (address, dest);
2318 /* Indicate the alignment we actually gave this variable. */
2319 #ifdef STACK_BOUNDARY
2320 DECL_ALIGN (var) = STACK_BOUNDARY;
2321 #else
2322 DECL_ALIGN (var) = BIGGEST_ALIGNMENT;
2323 #endif
2324 DECL_USER_ALIGN (var) = 0;
2327 /* Emit code to save the current value of the stack. */
2329 expand_stack_save (void)
2331 rtx ret = NULL_RTX;
2333 do_pending_stack_adjust ();
2334 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
2335 return ret;
2338 /* Emit code to restore the current value of the stack. */
2339 void
2340 expand_stack_restore (tree var)
2342 rtx sa = DECL_RTL (var);
2344 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
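/* Illustrative sketch (the variable names are hypothetical) of how the
   two helpers above are expected to be paired by a caller:

     sa = expand_stack_save ();               -- on entry to a block
     ... expand_stack_alloc (addr, size) ...  -- dynamic objects inside it
     expand_stack_restore (sa_decl);          -- on exit, where SA_DECL's
                                                 DECL_RTL holds SA

   so that any stack space grabbed inside the block is released again.  */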
2347 /* Emit code to perform the initialization of a declaration DECL. */
2349 void
2350 expand_decl_init (tree decl)
2352 int was_used = TREE_USED (decl);
2354 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
2355 for static decls. */
2356 if (TREE_CODE (decl) == CONST_DECL
2357 || TREE_STATIC (decl))
2358 return;
2360 /* Compute and store the initial value now. */
2362 push_temp_slots ();
2364 if (DECL_INITIAL (decl) == error_mark_node)
2366 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
2368 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
2369 || code == POINTER_TYPE || code == REFERENCE_TYPE)
2370 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
2373 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
2375 emit_line_note (DECL_SOURCE_LOCATION (decl));
2376 expand_assignment (decl, DECL_INITIAL (decl), 0);
2379 /* Don't let the initialization count as "using" the variable. */
2380 TREE_USED (decl) = was_used;
2382 /* Free any temporaries we made while initializing the decl. */
2383 preserve_temp_slots (NULL_RTX);
2384 free_temp_slots ();
2385 pop_temp_slots ();
2389 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
2390 DECL_ELTS is the list of elements that belong to DECL's type.
2391 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
2393 void
2394 expand_anon_union_decl (tree decl, tree cleanup ATTRIBUTE_UNUSED,
2395 tree decl_elts)
2397 rtx x;
2398 tree t;
2400 /* If any of the elements are addressable, so is the entire union. */
2401 for (t = decl_elts; t; t = TREE_CHAIN (t))
2402 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
2404 TREE_ADDRESSABLE (decl) = 1;
2405 break;
2408 expand_decl (decl);
2409 x = DECL_RTL (decl);
2411 /* Go through the elements, assigning RTL to each. */
2412 for (t = decl_elts; t; t = TREE_CHAIN (t))
2414 tree decl_elt = TREE_VALUE (t);
2415 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
2417 /* If any of the elements are used, so is the entire
2418 union. */
2419 if (TREE_USED (decl_elt))
2420 TREE_USED (decl) = 1;
2422 /* Propagate the union's alignment to the elements. */
2423 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
2424 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
2426 /* If the element has BLKmode and the union doesn't, the union is
2427 aligned such that the element doesn't need to have BLKmode, so
2428 change the element's mode to the appropriate one for its size. */
2429 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
2430 DECL_MODE (decl_elt) = mode
2431 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
2433 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
2434 instead create a new MEM rtx with the proper mode. */
2435 if (MEM_P (x))
2437 if (mode == GET_MODE (x))
2438 SET_DECL_RTL (decl_elt, x);
2439 else
2440 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
2442 else if (REG_P (x))
2444 if (mode == GET_MODE (x))
2445 SET_DECL_RTL (decl_elt, x);
2446 else
2447 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
2449 else
2450 abort ();
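/* Illustrative example (source code, not part of this file): for an
   anonymous union in a block, e.g.

     union { int i; float f; };
     i = 1;

   the union itself gets one rtx from expand_decl above, and each
   element (i and f here) is then given DECL_RTL aliasing that same
   register or memory, adjusted to the element's own mode.  */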
2454 /* Enter a case (Pascal) or switch (C) statement.
2455 Push a block onto case_stack and nesting_stack
2456 to accumulate the case-labels that are seen
2457 and to record the labels generated for the statement.
2459 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
2460 Otherwise, this construct is transparent for `exit_something'.
2462 EXPR is the index-expression to be dispatched on.
2463 TYPE is its nominal type. We could simply convert EXPR to this type,
2464 but instead we take short cuts. */
2466 void
2467 expand_start_case (tree index_expr)
2469 struct nesting *thiscase = ALLOC_NESTING ();
2471 /* Make an entry on case_stack for the case we are entering. */
2473 thiscase->desc = CASE_NESTING;
2474 thiscase->next = case_stack;
2475 thiscase->all = nesting_stack;
2476 thiscase->depth = ++nesting_depth;
2477 thiscase->exit_label = 0;
2478 thiscase->data.case_stmt.case_list = 0;
2479 thiscase->data.case_stmt.index_expr = index_expr;
2480 thiscase->data.case_stmt.default_label = 0;
2481 case_stack = thiscase;
2482 nesting_stack = thiscase;
2484 do_pending_stack_adjust ();
2486 /* Make sure case_stmt.start points to something that won't
2487 need any transformation before expand_end_case. */
2488 if (!NOTE_P (get_last_insn ()))
2489 emit_note (NOTE_INSN_DELETED);
2491 thiscase->data.case_stmt.start = get_last_insn ();
2494 /* Do the insertion of a case label into
2495 case_stack->data.case_stmt.case_list. The labels are fed to us
2496 in descending order from the sorted vector of case labels used
2497 in the tree part of the middle end. So the list we construct is
2498 sorted in ascending order. */
2500 void
2501 add_case_node (tree low, tree high, tree label)
2503 struct case_node *r;
2505 /* If there's no HIGH value, then this is not a case range; it's
2506 just a simple case label. But that's just a degenerate case
2507 range.
2508 If the bounds are equal, turn this into the one-value case. */
2509 if (!high || tree_int_cst_equal (low, high))
2510 high = low;
2512 /* Handle default labels specially. */
2513 if (!high && !low)
2515 #ifdef ENABLE_CHECKING
2516 if (case_stack->data.case_stmt.default_label != 0)
2517 abort ();
2518 #endif
2519 case_stack->data.case_stmt.default_label = label;
2520 return;
2523 /* Add this label to the chain. */
2524 r = ggc_alloc (sizeof (struct case_node));
2525 r->low = low;
2526 r->high = high;
2527 r->code_label = label;
2528 r->parent = r->left = NULL;
2529 r->right = case_stack->data.case_stmt.case_list;
2530 case_stack->data.case_stmt.case_list = r;
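/* Example (illustrative): for "switch (x) { case 1: ... case 5: ...
   case 9: ... }" the labels arrive here in descending order (9, 5, 1),
   so pushing each new node onto the front of case_list yields the
   ascending chain 1 -> 5 -> 9 that expand_end_case_type expects.  */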
2533 /* Maximum number of case bit tests. */
2534 #define MAX_CASE_BIT_TESTS 3
2536 /* By default, enable case bit tests on targets with ashlsi3. */
2537 #ifndef CASE_USE_BIT_TESTS
2538 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
2539 != CODE_FOR_nothing)
2540 #endif
2543 /* A case_bit_test represents a set of case nodes that may be
2544 selected from using a bit-wise comparison. HI and LO hold
2545 the integer to be tested against, LABEL contains the label
2546 to jump to upon success and BITS counts the number of case
2547 nodes handled by this test, typically the number of bits
2548 set in HI:LO. */
2550 struct case_bit_test
2552 HOST_WIDE_INT hi;
2553 HOST_WIDE_INT lo;
2554 rtx label;
2555 int bits;
2558 /* Determine whether "1 << x" is relatively cheap in word_mode. */
2560 static
2561 bool lshift_cheap_p (void)
2563 static bool init = false;
2564 static bool cheap = true;
2566 if (!init)
2568 rtx reg = gen_rtx_REG (word_mode, 10000);
2569 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
2570 cheap = cost < COSTS_N_INSNS (3);
2571 init = true;
2574 return cheap;
2577 /* Comparison function for qsort to order bit tests by decreasing
2578 number of case nodes, i.e. the node with the most cases gets
2579 tested first. */
2581 static int
2582 case_bit_test_cmp (const void *p1, const void *p2)
2584 const struct case_bit_test *d1 = p1;
2585 const struct case_bit_test *d2 = p2;
2587 return d2->bits - d1->bits;
2590 /* Expand a switch statement by a short sequence of bit-wise
2591 comparisons. "switch(x)" is effectively converted into
2592 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
2593 integer constants.
2595 INDEX_EXPR is the value being switched on, which is of
2596 type INDEX_TYPE. MINVAL is the lowest case value in
2597 the case nodes, of INDEX_TYPE type, and RANGE is the highest
2598 value minus MINVAL, also of type INDEX_TYPE. NODES is
2599 the set of case nodes, and DEFAULT_LABEL is the label to
2600 branch to should none of the cases match.
2602 There *MUST* be MAX_CASE_BIT_TESTS or fewer unique case
2603 node targets. */
2605 static void
2606 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
2607 tree range, case_node_ptr nodes, rtx default_label)
2609 struct case_bit_test test[MAX_CASE_BIT_TESTS];
2610 enum machine_mode mode;
2611 rtx expr, index, label;
2612 unsigned int i,j,lo,hi;
2613 struct case_node *n;
2614 unsigned int count;
2616 count = 0;
2617 for (n = nodes; n; n = n->right)
2619 label = label_rtx (n->code_label);
2620 for (i = 0; i < count; i++)
2621 if (same_case_target_p (label, test[i].label))
2622 break;
2624 if (i == count)
2626 if (count >= MAX_CASE_BIT_TESTS)
2627 abort ();
2628 test[i].hi = 0;
2629 test[i].lo = 0;
2630 test[i].label = label;
2631 test[i].bits = 1;
2632 count++;
2634 else
2635 test[i].bits++;
2637 lo = tree_low_cst (fold (build2 (MINUS_EXPR, index_type,
2638 n->low, minval)), 1);
2639 hi = tree_low_cst (fold (build2 (MINUS_EXPR, index_type,
2640 n->high, minval)), 1);
2641 for (j = lo; j <= hi; j++)
2642 if (j >= HOST_BITS_PER_WIDE_INT)
2643 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
2644 else
2645 test[i].lo |= (HOST_WIDE_INT) 1 << j;
2648 qsort (test, count, sizeof(*test), case_bit_test_cmp);
2650 index_expr = fold (build2 (MINUS_EXPR, index_type,
2651 convert (index_type, index_expr),
2652 convert (index_type, minval)));
2653 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
2654 do_pending_stack_adjust ();
2656 mode = TYPE_MODE (index_type);
2657 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
2658 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
2659 default_label);
2661 index = convert_to_mode (word_mode, index, 0);
2662 index = expand_binop (word_mode, ashl_optab, const1_rtx,
2663 index, NULL_RTX, 1, OPTAB_WIDEN);
2665 for (i = 0; i < count; i++)
2667 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
2668 expr = expand_binop (word_mode, and_optab, index, expr,
2669 NULL_RTX, 1, OPTAB_WIDEN);
2670 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
2671 word_mode, 1, test[i].label);
2674 emit_jump (default_label);
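/* Illustrative example of the transformation performed above (the
   source and the constant are only a sketch): a switch such as

     switch (c) { case 'a': case 'e': case 'i': case 'o': case 'u':
                    return 1;
                  default: return 0; }

   is emitted, roughly, as

     if ((unsigned) (c - 'a') > 'u' - 'a')  goto default_label;
     if ((1 << (c - 'a')) & CST)            goto vowel_label;
     goto default_label;

   where CST has one bit set for each vowel, offset from the minimum
   case value 'a'.  */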
2677 #ifndef HAVE_casesi
2678 #define HAVE_casesi 0
2679 #endif
2681 #ifndef HAVE_tablejump
2682 #define HAVE_tablejump 0
2683 #endif
2685 /* Terminate a case (Pascal) or switch (C) statement
2686 in which ORIG_INDEX is the expression to be tested.
2687 If ORIG_TYPE is not NULL, it is the original type of ORIG_INDEX
2688 as given in the source before any compiler conversions.
2689 Generate the code to test it and jump to the right place. */
2691 void
2692 expand_end_case_type (tree orig_index, tree orig_type)
2694 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
2695 rtx default_label = 0;
2696 struct case_node *n, *m;
2697 unsigned int count, uniq;
2698 rtx index;
2699 rtx table_label;
2700 int ncases;
2701 rtx *labelvec;
2702 int i;
2703 rtx before_case, end, lab;
2704 struct nesting *thiscase = case_stack;
2705 tree index_expr, index_type;
2706 bool exit_done = false;
2707 int unsignedp;
2709 /* Don't crash due to previous errors. */
2710 if (thiscase == NULL)
2711 return;
2713 index_expr = thiscase->data.case_stmt.index_expr;
2714 index_type = TREE_TYPE (index_expr);
2715 unsignedp = TYPE_UNSIGNED (index_type);
2716 if (orig_type == NULL)
2717 orig_type = TREE_TYPE (orig_index);
2719 do_pending_stack_adjust ();
2721 /* An ERROR_MARK occurs for various reasons including invalid data type. */
2722 if (index_type != error_mark_node)
2724 /* If we don't have a default-label, create one here,
2725 after the body of the switch. */
2726 if (thiscase->data.case_stmt.default_label == 0)
2728 thiscase->data.case_stmt.default_label
2729 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
2730 /* Share the exit label if possible. */
2731 if (thiscase->exit_label)
2733 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
2734 thiscase->exit_label);
2735 exit_done = true;
2737 expand_label (thiscase->data.case_stmt.default_label);
2739 default_label = label_rtx (thiscase->data.case_stmt.default_label);
2741 before_case = get_last_insn ();
2743 /* Get upper and lower bounds of case values.
2744 Also convert all the case values to the index expr's data type. */
2746 uniq = 0;
2747 count = 0;
2748 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
2750 /* Check low and high label values are integers. */
2751 if (TREE_CODE (n->low) != INTEGER_CST)
2752 abort ();
2753 if (TREE_CODE (n->high) != INTEGER_CST)
2754 abort ();
2756 n->low = convert (index_type, n->low);
2757 n->high = convert (index_type, n->high);
2759 /* Count the elements and track the largest and smallest
2760 of them (treating them as signed even if they are not). */
2761 if (count++ == 0)
2763 minval = n->low;
2764 maxval = n->high;
2766 else
2768 if (INT_CST_LT (n->low, minval))
2769 minval = n->low;
2770 if (INT_CST_LT (maxval, n->high))
2771 maxval = n->high;
2773 /* A range counts double, since it requires two compares. */
2774 if (! tree_int_cst_equal (n->low, n->high))
2775 count++;
2777 /* Count the number of unique case node targets. */
2778 uniq++;
2779 lab = label_rtx (n->code_label);
2780 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
2781 if (same_case_target_p (label_rtx (m->code_label), lab))
2783 uniq--;
2784 break;
2788 /* Compute span of values. */
2789 if (count != 0)
2790 range = fold (build2 (MINUS_EXPR, index_type, maxval, minval));
2792 if (count == 0)
2794 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
2795 emit_jump (default_label);
2798 /* Try implementing this switch statement by a short sequence of
2799 bit-wise comparisons. However, we let the binary-tree case
2800 below handle constant index expressions. */
2801 else if (CASE_USE_BIT_TESTS
2802 && ! TREE_CONSTANT (index_expr)
2803 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
2804 && compare_tree_int (range, 0) > 0
2805 && lshift_cheap_p ()
2806 && ((uniq == 1 && count >= 3)
2807 || (uniq == 2 && count >= 5)
2808 || (uniq == 3 && count >= 6)))
2810 /* Optimize the case where all the case values fit in a
2811 word without having to subtract MINVAL. In this case,
2812 we can optimize away the subtraction. */
2813 if (compare_tree_int (minval, 0) > 0
2814 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
2816 minval = integer_zero_node;
2817 range = maxval;
2819 emit_case_bit_tests (index_type, index_expr, minval, range,
2820 thiscase->data.case_stmt.case_list,
2821 default_label);
2824 /* If range of values is much bigger than number of values,
2825 make a sequence of conditional branches instead of a dispatch.
2826 If the switch-index is a constant, do it this way
2827 because we can optimize it. */
2829 else if (count < case_values_threshold ()
2830 || compare_tree_int (range,
2831 (optimize_size ? 3 : 10) * count) > 0
2832 /* RANGE may be signed, and really large ranges will show up
2833 as negative numbers. */
2834 || compare_tree_int (range, 0) < 0
2835 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
2836 || flag_pic
2837 #endif
2838 || TREE_CONSTANT (index_expr)
2839 /* If neither casesi nor tablejump is available, we can
2840 only go this way. */
2841 || (!HAVE_casesi && !HAVE_tablejump))
2843 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
2845 /* If the index is a short or char for which we do not have
2846 an insn to handle comparisons directly, convert it to
2847 a full integer now, rather than letting each comparison
2848 generate the conversion. */
2850 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
2851 && ! have_insn_for (COMPARE, GET_MODE (index)))
2853 enum machine_mode wider_mode;
2854 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
2855 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
2856 if (have_insn_for (COMPARE, wider_mode))
2858 index = convert_to_mode (wider_mode, index, unsignedp);
2859 break;
2863 do_pending_stack_adjust ();
2865 if (MEM_P (index))
2866 index = copy_to_reg (index);
2867 if (GET_CODE (index) == CONST_INT
2868 || TREE_CODE (index_expr) == INTEGER_CST)
2870 /* Make a tree node with the proper constant value
2871 if we don't already have one. */
2872 if (TREE_CODE (index_expr) != INTEGER_CST)
2874 index_expr
2875 = build_int_2 (INTVAL (index),
2876 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
2877 index_expr = convert (index_type, index_expr);
2880 /* For constant index expressions we need only
2881 issue an unconditional branch to the appropriate
2882 target code. The job of removing any unreachable
2883 code is left to the optimization phase if the
2884 "-O" option is specified. */
2885 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
2886 if (! tree_int_cst_lt (index_expr, n->low)
2887 && ! tree_int_cst_lt (n->high, index_expr))
2888 break;
2890 if (n)
2891 emit_jump (label_rtx (n->code_label));
2892 else
2893 emit_jump (default_label);
2895 else
2897 /* If the index expression is not constant we generate
2898 a binary decision tree to select the appropriate
2899 target code. This is done as follows:
2901 The list of cases is rearranged into a binary tree,
2902 nearly optimal assuming equal probability for each case.
2904 The tree is transformed into RTL, eliminating
2905 redundant test conditions at the same time.
2907 If program flow could reach the end of the
2908 decision tree an unconditional jump to the
2909 default code is emitted. */
2911 use_cost_table
2912 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
2913 && estimate_case_costs (thiscase->data.case_stmt.case_list));
2914 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
2915 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
2916 default_label, index_type);
2917 emit_jump (default_label);
2920 else
2922 table_label = gen_label_rtx ();
2923 if (! try_casesi (index_type, index_expr, minval, range,
2924 table_label, default_label))
2926 index_type = integer_type_node;
2928 /* Index jumptables from zero for suitable values of
2929 minval to avoid a subtraction. */
2930 if (! optimize_size
2931 && compare_tree_int (minval, 0) > 0
2932 && compare_tree_int (minval, 3) < 0)
2934 minval = integer_zero_node;
2935 range = maxval;
2938 if (! try_tablejump (index_type, index_expr, minval, range,
2939 table_label, default_label))
2940 abort ();
2943 /* Get table of labels to jump to, in order of case index. */
2945 ncases = tree_low_cst (range, 0) + 1;
2946 labelvec = alloca (ncases * sizeof (rtx));
2947 memset (labelvec, 0, ncases * sizeof (rtx));
2949 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
2951 /* Compute the low and high bounds relative to the minimum
2952 value since that should fit in a HOST_WIDE_INT while the
2953 actual values may not. */
2954 HOST_WIDE_INT i_low
2955 = tree_low_cst (fold (build2 (MINUS_EXPR, index_type,
2956 n->low, minval)), 1);
2957 HOST_WIDE_INT i_high
2958 = tree_low_cst (fold (build2 (MINUS_EXPR, index_type,
2959 n->high, minval)), 1);
2960 HOST_WIDE_INT i;
2962 for (i = i_low; i <= i_high; i ++)
2963 labelvec[i]
2964 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
2967 /* Fill in the gaps with the default. */
2968 for (i = 0; i < ncases; i++)
2969 if (labelvec[i] == 0)
2970 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
2972 /* Output the table. */
2973 emit_label (table_label);
2975 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
2976 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
2977 gen_rtx_LABEL_REF (Pmode, table_label),
2978 gen_rtvec_v (ncases, labelvec),
2979 const0_rtx, const0_rtx));
2980 else
2981 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
2982 gen_rtvec_v (ncases, labelvec)));
2984 /* If the case insn drops through the table,
2985 after the table we must jump to the default-label.
2986 Otherwise record no drop-through after the table. */
2987 #ifdef CASE_DROPS_THROUGH
2988 emit_jump (default_label);
2989 #else
2990 emit_barrier ();
2991 #endif
2994 before_case = NEXT_INSN (before_case);
2995 end = get_last_insn ();
2996 if (squeeze_notes (&before_case, &end))
2997 abort ();
2998 reorder_insns (before_case, end,
2999 thiscase->data.case_stmt.start);
3002 if (thiscase->exit_label && !exit_done)
3003 emit_label (thiscase->exit_label);
3005 POPSTACK (case_stack);
3007 free_temp_slots ();
3010 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
3012 static void
3013 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
3015 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
3017 if (op1 == op2)
3018 emit_jump (label);
3020 else
3021 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
3022 (GET_MODE (op1) == VOIDmode
3023 ? GET_MODE (op2) : GET_MODE (op1)),
3024 unsignedp, label);
3027 /* Not all case values are encountered equally. This function
3028 uses a heuristic to weight case labels, in cases where that
3029 looks like a reasonable thing to do.
3031 Right now, all we try to guess is text, and we establish the
3032 following weights:
3034 chars above space: 16
3035 digits: 16
3036 default: 12
3037 space, punct: 8
3038 tab: 4
3039 newline: 2
3040 other "\" chars: 1
3041 remaining chars: 0
3043 If we find any cases in the switch that are not either -1 or in the range
3044 of valid ASCII characters, or are control characters other than those
3045 commonly used with "\", don't treat this switch as scanning text.
3047 Return 1 if these nodes are suitable for cost estimation, otherwise
3048 return 0. */
3050 static int
3051 estimate_case_costs (case_node_ptr node)
3053 tree min_ascii = integer_minus_one_node;
3054 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
3055 case_node_ptr n;
3056 int i;
3058 /* If we haven't already made the cost table, make it now. Note that the
3059 lower bound of the table is -1, not zero. */
3061 if (! cost_table_initialized)
3063 cost_table_initialized = 1;
3065 for (i = 0; i < 128; i++)
3067 if (ISALNUM (i))
3068 COST_TABLE (i) = 16;
3069 else if (ISPUNCT (i))
3070 COST_TABLE (i) = 8;
3071 else if (ISCNTRL (i))
3072 COST_TABLE (i) = -1;
3075 COST_TABLE (' ') = 8;
3076 COST_TABLE ('\t') = 4;
3077 COST_TABLE ('\0') = 4;
3078 COST_TABLE ('\n') = 2;
3079 COST_TABLE ('\f') = 1;
3080 COST_TABLE ('\v') = 1;
3081 COST_TABLE ('\b') = 1;
3084 /* See if all the case expressions look like text. It is text if the
3085 constant is >= -1 and the highest constant is <= 127. Do all comparisons
3086 as signed arithmetic since we don't want to ever access cost_table with a
3087 value less than -1. Also check that none of the constants in a range
3088 are strange control characters. */
3090 for (n = node; n; n = n->right)
3092 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
3093 return 0;
3095 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
3096 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
3097 if (COST_TABLE (i) < 0)
3098 return 0;
3101 /* All interesting values are within the range of interesting
3102 ASCII characters. */
3103 return 1;
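/* For example (illustrative): in a switch over characters, a node for
   'a' gets cost 16 while one for '\n' gets cost 2, so when
   balance_case_nodes below splits the chain it does so by cumulative
   cost rather than by node count, keeping the likelier characters
   closer to the root of the comparison tree.  */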
3106 /* Determine whether two case labels branch to the same target.
3107 Since we now do tree optimizations, just comparing labels is
3108 good enough. */
3110 static bool
3111 same_case_target_p (rtx l1, rtx l2)
3113 return l1 == l2;
3116 /* Take an ordered list of case nodes
3117 and transform them into a near optimal binary tree,
3118 on the assumption that any target code selection value is as
3119 likely as any other.
3121 The transformation is performed by splitting the ordered
3122 list into two equal sections plus a pivot. The parts are
3123 then attached to the pivot as left and right branches. Each
3124 branch is then transformed recursively. */
3126 static void
3127 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
3129 case_node_ptr np;
3131 np = *head;
3132 if (np)
3134 int cost = 0;
3135 int i = 0;
3136 int ranges = 0;
3137 case_node_ptr *npp;
3138 case_node_ptr left;
3140 /* Count the number of entries on branch. Also count the ranges. */
3142 while (np)
3144 if (!tree_int_cst_equal (np->low, np->high))
3146 ranges++;
3147 if (use_cost_table)
3148 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
3151 if (use_cost_table)
3152 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
3154 i++;
3155 np = np->right;
3158 if (i > 2)
3160 /* Split this list if it is long enough for that to help. */
3161 npp = head;
3162 left = *npp;
3163 if (use_cost_table)
3165 /* Find the place in the list that bisects the list's total cost;
3166 here I gets half the total cost. */
3167 int n_moved = 0;
3168 i = (cost + 1) / 2;
3169 while (1)
3171 /* Skip nodes while their cost does not reach that amount. */
3172 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
3173 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
3174 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
3175 if (i <= 0)
3176 break;
3177 npp = &(*npp)->right;
3178 n_moved += 1;
3180 if (n_moved == 0)
3182 /* Leave this branch lopsided, but optimize left-hand
3183 side and fill in `parent' fields for right-hand side. */
3184 np = *head;
3185 np->parent = parent;
3186 balance_case_nodes (&np->left, np);
3187 for (; np->right; np = np->right)
3188 np->right->parent = np;
3189 return;
3192 /* If there are just three nodes, split at the middle one. */
3193 else if (i == 3)
3194 npp = &(*npp)->right;
3195 else
3197 /* Find the place in the list that bisects the list's total cost,
3198 where ranges count as 2.
3199 Here I gets half the total cost. */
3200 i = (i + ranges + 1) / 2;
3201 while (1)
3203 /* Skip nodes while their cost does not reach that amount. */
3204 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
3205 i--;
3206 i--;
3207 if (i <= 0)
3208 break;
3209 npp = &(*npp)->right;
3212 *head = np = *npp;
3213 *npp = 0;
3214 np->parent = parent;
3215 np->left = left;
3217 /* Optimize each of the two split parts. */
3218 balance_case_nodes (&np->left, np);
3219 balance_case_nodes (&np->right, np);
3221 else
3223 /* Else leave this branch as one level,
3224 but fill in `parent' fields. */
3225 np = *head;
3226 np->parent = parent;
3227 for (; np->right; np = np->right)
3228 np->right->parent = np;
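/* Small illustration (values chosen arbitrarily): an ordered chain of
   seven equally weighted single-value nodes

     1 -> 2 -> 3 -> 4 -> 5 -> 6 -> 7

   is split at the pivot 4, giving a tree whose left branch holds 1-3
   and whose right branch holds 5-7; each branch is then balanced the
   same way recursively.  */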
3233 /* Search the parent sections of the case node tree
3234 to see if a test for the lower bound of NODE would be redundant.
3235 INDEX_TYPE is the type of the index expression.
3237 The instructions to generate the case decision tree are
3238 output in the same order as nodes are processed so it is
3239 known that if a parent node checks the range of the current
3240 node minus one, then the current node is bounded at its lower
3241 span. Thus the test would be redundant. */
3243 static int
3244 node_has_low_bound (case_node_ptr node, tree index_type)
3246 tree low_minus_one;
3247 case_node_ptr pnode;
3249 /* If the lower bound of this node is the lowest value in the index type,
3250 we need not test it. */
3252 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
3253 return 1;
3255 /* If this node has a left branch, the value at the left must be less
3256 than that at this node, so it cannot be bounded at the bottom and
3257 we need not bother testing any further. */
3259 if (node->left)
3260 return 0;
3262 low_minus_one = fold (build2 (MINUS_EXPR, TREE_TYPE (node->low),
3263 node->low, integer_one_node));
3265 /* If the subtraction above overflowed, we can't verify anything.
3266 Otherwise, look for a parent that tests our value - 1. */
3268 if (! tree_int_cst_lt (low_minus_one, node->low))
3269 return 0;
3271 for (pnode = node->parent; pnode; pnode = pnode->parent)
3272 if (tree_int_cst_equal (low_minus_one, pnode->high))
3273 return 1;
3275 return 0;
3278 /* Search the parent sections of the case node tree
3279 to see if a test for the upper bound of NODE would be redundant.
3280 INDEX_TYPE is the type of the index expression.
3282 The instructions to generate the case decision tree are
3283 output in the same order as nodes are processed so it is
3284 known that if a parent node checks the range of the current
3285 node plus one, then the current node is bounded at its upper
3286 span. Thus the test would be redundant. */
3288 static int
3289 node_has_high_bound (case_node_ptr node, tree index_type)
3291 tree high_plus_one;
3292 case_node_ptr pnode;
3294 /* If there is no upper bound, obviously no test is needed. */
3296 if (TYPE_MAX_VALUE (index_type) == NULL)
3297 return 1;
3299 /* If the upper bound of this node is the highest value in the type
3300 of the index expression, we need not test against it. */
3302 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
3303 return 1;
3305 /* If this node has a right branch, the value at the right must be greater
3306 than that at this node, so it cannot be bounded at the top and
3307 we need not bother testing any further. */
3309 if (node->right)
3310 return 0;
3312 high_plus_one = fold (build2 (PLUS_EXPR, TREE_TYPE (node->high),
3313 node->high, integer_one_node));
3315 /* If the addition above overflowed, we can't verify anything.
3316 Otherwise, look for a parent that tests our value + 1. */
3318 if (! tree_int_cst_lt (node->high, high_plus_one))
3319 return 0;
3321 for (pnode = node->parent; pnode; pnode = pnode->parent)
3322 if (tree_int_cst_equal (high_plus_one, pnode->low))
3323 return 1;
3325 return 0;
3328 /* Search the parent sections of the
3329 case node tree to see if both tests for the upper and lower
3330 bounds of NODE would be redundant. */
3332 static int
3333 node_is_bounded (case_node_ptr node, tree index_type)
3335 return (node_has_low_bound (node, index_type)
3336 && node_has_high_bound (node, index_type));
3339 /* Emit step-by-step code to select a case for the value of INDEX.
3340 The thus generated decision tree follows the form of the
3341 case-node binary tree NODE, whose nodes represent test conditions.
3342 INDEX_TYPE is the type of the index of the switch.
3344 Care is taken to prune redundant tests from the decision tree
3345 by detecting any boundary conditions already checked by
3346 emitted rtx. (See node_has_high_bound, node_has_low_bound
3347 and node_is_bounded, above.)
3349 Where the test conditions can be shown to be redundant we emit
3350 an unconditional jump to the target code. As a further
3351 optimization, the subordinates of a tree node are examined to
3352 check for bounded nodes. In this case conditional and/or
3353 unconditional jumps as a result of the boundary check for the
3354 current node are arranged to target the subordinates associated
3355 code for out of bound conditions on the current node.
3357 We can assume that when control reaches the code generated here,
3358 the index value has already been compared with the parents
3359 of this node, and determined to be on the same side of each parent
3360 as this node is. Thus, if this node tests for the value 51,
3361 and a parent tested for 52, we don't need to consider
3362 the possibility of a value greater than 51. If another parent
3363 tests for the value 50, then this node need not test anything. */
3365 static void
3366 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
3367 tree index_type)
3369 /* If INDEX has an unsigned type, we must make unsigned branches. */
3370 int unsignedp = TYPE_UNSIGNED (index_type);
3371 enum machine_mode mode = GET_MODE (index);
3372 enum machine_mode imode = TYPE_MODE (index_type);
3374 /* See if our parents have already tested everything for us.
3375 If they have, emit an unconditional jump for this node. */
3376 if (node_is_bounded (node, index_type))
3377 emit_jump (label_rtx (node->code_label));
3379 else if (tree_int_cst_equal (node->low, node->high))
3381 /* Node is single valued. First see if the index expression matches
3382 this node and then check our children, if any. */
3384 do_jump_if_equal (index,
3385 convert_modes (mode, imode,
3386 expand_expr (node->low, NULL_RTX,
3387 VOIDmode, 0),
3388 unsignedp),
3389 label_rtx (node->code_label), unsignedp);
3391 if (node->right != 0 && node->left != 0)
3393 /* This node has children on both sides.
3394 Dispatch to one side or the other
3395 by comparing the index value with this node's value.
3396 If one subtree is bounded, check that one first,
3397 so we can avoid real branches in the tree. */
3399 if (node_is_bounded (node->right, index_type))
3401 emit_cmp_and_jump_insns (index,
3402 convert_modes
3403 (mode, imode,
3404 expand_expr (node->high, NULL_RTX,
3405 VOIDmode, 0),
3406 unsignedp),
3407 GT, NULL_RTX, mode, unsignedp,
3408 label_rtx (node->right->code_label));
3409 emit_case_nodes (index, node->left, default_label, index_type);
3412 else if (node_is_bounded (node->left, index_type))
3414 emit_cmp_and_jump_insns (index,
3415 convert_modes
3416 (mode, imode,
3417 expand_expr (node->high, NULL_RTX,
3418 VOIDmode, 0),
3419 unsignedp),
3420 LT, NULL_RTX, mode, unsignedp,
3421 label_rtx (node->left->code_label));
3422 emit_case_nodes (index, node->right, default_label, index_type);
3425 /* If both children are single-valued cases with no
3426 children, finish up all the work. This way, we can save
3427 one ordered comparison. */
3428 else if (tree_int_cst_equal (node->right->low, node->right->high)
3429 && node->right->left == 0
3430 && node->right->right == 0
3431 && tree_int_cst_equal (node->left->low, node->left->high)
3432 && node->left->left == 0
3433 && node->left->right == 0)
3435 /* Neither node is bounded. First distinguish the two sides;
3436 then emit the code for one side at a time. */
3438 /* See if the value matches what the right hand side
3439 wants. */
3440 do_jump_if_equal (index,
3441 convert_modes (mode, imode,
3442 expand_expr (node->right->low,
3443 NULL_RTX,
3444 VOIDmode, 0),
3445 unsignedp),
3446 label_rtx (node->right->code_label),
3447 unsignedp);
3449 /* See if the value matches what the left hand side
3450 wants. */
3451 do_jump_if_equal (index,
3452 convert_modes (mode, imode,
3453 expand_expr (node->left->low,
3454 NULL_RTX,
3455 VOIDmode, 0),
3456 unsignedp),
3457 label_rtx (node->left->code_label),
3458 unsignedp);
3461 else
3463 /* Neither node is bounded. First distinguish the two sides;
3464 then emit the code for one side at a time. */
3466 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3468 /* See if the value is on the right. */
3469 emit_cmp_and_jump_insns (index,
3470 convert_modes
3471 (mode, imode,
3472 expand_expr (node->high, NULL_RTX,
3473 VOIDmode, 0),
3474 unsignedp),
3475 GT, NULL_RTX, mode, unsignedp,
3476 label_rtx (test_label));
3478 /* Value must be on the left.
3479 Handle the left-hand subtree. */
3480 emit_case_nodes (index, node->left, default_label, index_type);
3481 /* If left-hand subtree does nothing,
3482 go to default. */
3483 emit_jump (default_label);
3485 /* Code branches here for the right-hand subtree. */
3486 expand_label (test_label);
3487 emit_case_nodes (index, node->right, default_label, index_type);
3491 else if (node->right != 0 && node->left == 0)
3493 /* Here we have a right child but no left so we issue conditional
3494 branch to default and process the right child.
3496 Omit the conditional branch to default if it would avoid only one
3497 right child; it costs too much space to save so little time.
3499 if (node->right->right || node->right->left
3500 || !tree_int_cst_equal (node->right->low, node->right->high))
3502 if (!node_has_low_bound (node, index_type))
3504 emit_cmp_and_jump_insns (index,
3505 convert_modes
3506 (mode, imode,
3507 expand_expr (node->high, NULL_RTX,
3508 VOIDmode, 0),
3509 unsignedp),
3510 LT, NULL_RTX, mode, unsignedp,
3511 default_label);
3514 emit_case_nodes (index, node->right, default_label, index_type);
3516 else
3517 /* We cannot process node->right normally
3518 since we haven't ruled out the numbers less than
3519 this node's value. So handle node->right explicitly. */
3520 do_jump_if_equal (index,
3521 convert_modes
3522 (mode, imode,
3523 expand_expr (node->right->low, NULL_RTX,
3524 VOIDmode, 0),
3525 unsignedp),
3526 label_rtx (node->right->code_label), unsignedp);
3529 else if (node->right == 0 && node->left != 0)
3531 /* Just one subtree, on the left. */
3532 if (node->left->left || node->left->right
3533 || !tree_int_cst_equal (node->left->low, node->left->high))
3535 if (!node_has_high_bound (node, index_type))
3537 emit_cmp_and_jump_insns (index,
3538 convert_modes
3539 (mode, imode,
3540 expand_expr (node->high, NULL_RTX,
3541 VOIDmode, 0),
3542 unsignedp),
3543 GT, NULL_RTX, mode, unsignedp,
3544 default_label);
3547 emit_case_nodes (index, node->left, default_label, index_type);
3549 else
3550 /* We cannot process node->left normally
3551 since we haven't ruled out the numbers greater than
3552 this node's value. So handle node->left explicitly. */
3553 do_jump_if_equal (index,
3554 convert_modes
3555 (mode, imode,
3556 expand_expr (node->left->low, NULL_RTX,
3557 VOIDmode, 0),
3558 unsignedp),
3559 label_rtx (node->left->code_label), unsignedp);
3562 else
3564 /* Node is a range. These cases are very similar to those for a single
3565 value, except that we do not start by testing whether this node
3566 is the one to branch to. */
3568 if (node->right != 0 && node->left != 0)
3570 /* Node has subtrees on both sides.
3571 If the right-hand subtree is bounded,
3572 test for it first, since we can go straight there.
3573 Otherwise, we need to make a branch in the control structure,
3574 then handle the two subtrees. */
3575 tree test_label = 0;
3577 if (node_is_bounded (node->right, index_type))
3578 /* Right hand node is fully bounded so we can eliminate any
3579 testing and branch directly to the target code. */
3580 emit_cmp_and_jump_insns (index,
3581 convert_modes
3582 (mode, imode,
3583 expand_expr (node->high, NULL_RTX,
3584 VOIDmode, 0),
3585 unsignedp),
3586 GT, NULL_RTX, mode, unsignedp,
3587 label_rtx (node->right->code_label));
3588 else
3590 /* Right hand node requires testing.
3591 Branch to a label where we will handle it later. */
3593 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3594 emit_cmp_and_jump_insns (index,
3595 convert_modes
3596 (mode, imode,
3597 expand_expr (node->high, NULL_RTX,
3598 VOIDmode, 0),
3599 unsignedp),
3600 GT, NULL_RTX, mode, unsignedp,
3601 label_rtx (test_label));
3604 /* Value belongs to this node or to the left-hand subtree. */
3606 emit_cmp_and_jump_insns (index,
3607 convert_modes
3608 (mode, imode,
3609 expand_expr (node->low, NULL_RTX,
3610 VOIDmode, 0),
3611 unsignedp),
3612 GE, NULL_RTX, mode, unsignedp,
3613 label_rtx (node->code_label));
3615 /* Handle the left-hand subtree. */
3616 emit_case_nodes (index, node->left, default_label, index_type);
3618 /* If right node had to be handled later, do that now. */
3620 if (test_label)
3622 /* If the left-hand subtree fell through,
3623 don't let it fall into the right-hand subtree. */
3624 emit_jump (default_label);
3626 expand_label (test_label);
3627 emit_case_nodes (index, node->right, default_label, index_type);
3631 else if (node->right != 0 && node->left == 0)
3633 /* Deal with values to the left of this node,
3634 if they are possible. */
3635 if (!node_has_low_bound (node, index_type))
3637 emit_cmp_and_jump_insns (index,
3638 convert_modes
3639 (mode, imode,
3640 expand_expr (node->low, NULL_RTX,
3641 VOIDmode, 0),
3642 unsignedp),
3643 LT, NULL_RTX, mode, unsignedp,
3644 default_label);
3647 /* Value belongs to this node or to the right-hand subtree. */
3649 emit_cmp_and_jump_insns (index,
3650 convert_modes
3651 (mode, imode,
3652 expand_expr (node->high, NULL_RTX,
3653 VOIDmode, 0),
3654 unsignedp),
3655 LE, NULL_RTX, mode, unsignedp,
3656 label_rtx (node->code_label));
3658 emit_case_nodes (index, node->right, default_label, index_type);
3661 else if (node->right == 0 && node->left != 0)
3663 /* Deal with values to the right of this node,
3664 if they are possible. */
3665 if (!node_has_high_bound (node, index_type))
3667 emit_cmp_and_jump_insns (index,
3668 convert_modes
3669 (mode, imode,
3670 expand_expr (node->high, NULL_RTX,
3671 VOIDmode, 0),
3672 unsignedp),
3673 GT, NULL_RTX, mode, unsignedp,
3674 default_label);
3677 /* Value belongs to this node or to the left-hand subtree. */
3679 emit_cmp_and_jump_insns (index,
3680 convert_modes
3681 (mode, imode,
3682 expand_expr (node->low, NULL_RTX,
3683 VOIDmode, 0),
3684 unsignedp),
3685 GE, NULL_RTX, mode, unsignedp,
3686 label_rtx (node->code_label));
3688 emit_case_nodes (index, node->left, default_label, index_type);
3691 else
3693 /* Node has no children so we check low and high bounds to remove
3694 redundant tests. Only one of the bounds can exist,
3695 since otherwise this node is bounded--a case tested already. */
3696 int high_bound = node_has_high_bound (node, index_type);
3697 int low_bound = node_has_low_bound (node, index_type);
3699 if (!high_bound && low_bound)
3701 emit_cmp_and_jump_insns (index,
3702 convert_modes
3703 (mode, imode,
3704 expand_expr (node->high, NULL_RTX,
3705 VOIDmode, 0),
3706 unsignedp),
3707 GT, NULL_RTX, mode, unsignedp,
3708 default_label);
3711 else if (!low_bound && high_bound)
3713 emit_cmp_and_jump_insns (index,
3714 convert_modes
3715 (mode, imode,
3716 expand_expr (node->low, NULL_RTX,
3717 VOIDmode, 0),
3718 unsignedp),
3719 LT, NULL_RTX, mode, unsignedp,
3720 default_label);
3722 else if (!low_bound && !high_bound)
3724 /* Widen LOW and HIGH to the same width as INDEX. */
3725 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
3726 tree low = build1 (CONVERT_EXPR, type, node->low);
3727 tree high = build1 (CONVERT_EXPR, type, node->high);
3728 rtx low_rtx, new_index, new_bound;
3730 /* Instead of doing two branches, emit one unsigned branch for
3731 (index-low) > (high-low). */
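/* Worked example (numbers are illustrative): for a range case
   10 ... 20, instead of testing "index < 10" and "index > 20"
   separately we test

     (unsigned) (index - 10) > 20 - 10

   which is true exactly when index is outside 10 ... 20, because
   values below 10 wrap around to very large unsigned numbers.  */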
3732 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
3733 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
3734 NULL_RTX, unsignedp,
3735 OPTAB_WIDEN);
3736 new_bound = expand_expr (fold (build2 (MINUS_EXPR, type,
3737 high, low)),
3738 NULL_RTX, mode, 0);
3740 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
3741 mode, 1, default_label);
3744 emit_jump (label_rtx (node->code_label));
3749 #include "gt-stmt.h"