fixes for scoping
[official-gcc.git] / gcc / stmt.c
blob0b9ce8184f67e4d3185b31e9d2bddc2bb55ae8f6
1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4 2010 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 The functions whose names start with `expand_' are called by the
25 expander to generate RTL instructions for various kinds of constructs. */
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
32 #include "rtl.h"
33 #include "hard-reg-set.h"
34 #include "tree.h"
35 #include "tm_p.h"
36 #include "flags.h"
37 #include "except.h"
38 #include "function.h"
39 #include "insn-config.h"
40 #include "expr.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "machmode.h"
44 #include "diagnostic-core.h"
45 #include "toplev.h"
46 #include "output.h"
47 #include "ggc.h"
48 #include "langhooks.h"
49 #include "predict.h"
50 #include "optabs.h"
51 #include "target.h"
52 #include "gimple.h"
53 #include "regs.h"
54 #include "alloc-pool.h"
55 #include "pretty-print.h"
56 #include "bitmap.h"
59 /* Functions and data structures for expanding case statements. */
61 /* Case label structure, used to hold info on labels within case
62 statements. We handle "range" labels; for a single-value label
63 as in C, the high and low limits are the same.
65 We start with a vector of case nodes sorted in ascending order, and
66 the default label as the last element in the vector. Before expanding
67 to RTL, we transform this vector into a list linked via the RIGHT
68 fields in the case_node struct. Nodes with higher case values are
69 later in the list.
71 Switch statements can be output in three forms. A branch table is
72 used if there are more than a few labels and the labels are dense
73 within the range between the smallest and largest case value. If a
74 branch table is used, no further manipulations are done with the case
75 node chain.
77 The alternative to the use of a branch table is to generate a series
78 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
79 and PARENT fields to hold a binary tree. Initially the tree is
80 totally unbalanced, with everything on the right. We balance the tree
81 with nodes on the left having lower case values than the parent
82 and nodes on the right having higher values. We then output the tree
83 in order.
85 For very small, suitable switch statements, we can generate a series
86 of simple bit test and branches instead. */
88 struct case_node
90 struct case_node *left; /* Left son in binary tree */
91 struct case_node *right; /* Right son in binary tree; also node chain */
92 struct case_node *parent; /* Parent of node in binary tree */
93 tree low; /* Lowest index value for this label */
94 tree high; /* Highest index value for this label */
95 tree code_label; /* Label to jump to when node matches */
98 typedef struct case_node case_node;
99 typedef struct case_node *case_node_ptr;
101 /* These are used by estimate_case_costs and balance_case_nodes. */
103 /* This must be a signed type, and non-ANSI compilers lack signed char. */
104 static short cost_table_[129];
105 static int use_cost_table;
106 static int cost_table_initialized;
108 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
109 is unsigned. */
110 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
112 static int n_occurrences (int, const char *);
113 static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
114 static void expand_nl_goto_receiver (void);
115 static bool check_operand_nalternatives (tree, tree);
116 static bool check_unique_operand_names (tree, tree, tree);
117 static char *resolve_operand_name_1 (char *, tree, tree, tree);
118 static void expand_null_return_1 (void);
119 static void expand_value_return (rtx);
120 static int estimate_case_costs (case_node_ptr);
121 static bool lshift_cheap_p (void);
122 static int case_bit_test_cmp (const void *, const void *);
123 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
124 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
125 static int node_has_low_bound (case_node_ptr, tree);
126 static int node_has_high_bound (case_node_ptr, tree);
127 static int node_is_bounded (case_node_ptr, tree);
128 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
129 static struct case_node *add_case_node (struct case_node *, tree,
130 tree, tree, tree, alloc_pool);
133 /* Return the rtx-label that corresponds to a LABEL_DECL,
134 creating it if necessary. */
137 label_rtx (tree label)
139 gcc_assert (TREE_CODE (label) == LABEL_DECL);
/* Lazily create the code_label rtx on first use and cache it in
   DECL_RTL.  Labels whose address is taken (FORCED_LABEL) or that are
   nonlocal-goto targets must survive dead-code removal, hence
   LABEL_PRESERVE_P.  */
141 if (!DECL_RTL_SET_P (label))
143 rtx r = gen_label_rtx ();
144 SET_DECL_RTL (label, r);
145 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
146 LABEL_PRESERVE_P (r) = 1;
149 return DECL_RTL (label);
152 /* As above, but also put it on the forced-reference list of the
153 function that contains it. */
155 force_label_rtx (tree label)
157 rtx ref = label_rtx (label);
158 tree function = decl_function_context (label);
/* The label must belong to some function; its rtx is prepended to the
   global forced_labels EXPR_LIST so it is never deleted as unused.  */
160 gcc_assert (function);
162 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref, forced_labels);
163 return ref;
166 /* Add an unconditional jump to LABEL as the next sequential instruction. */
168 void
169 emit_jump (rtx label)
/* Flush any deferred stack-pointer adjustment before the control
   transfer; the barrier marks the code that follows as unreachable.  */
171 do_pending_stack_adjust ();
172 emit_jump_insn (gen_jump (label));
173 emit_barrier ();
176 /* Emit code to jump to the address
177 specified by the pointer expression EXP. */
179 void
180 expand_computed_goto (tree exp)
182 rtx x = expand_normal (exp);
/* The address may have been expanded in ptr_mode; indirect-jump
   targets must be in Pmode.  */
184 x = convert_memory_address (Pmode, x);
186 do_pending_stack_adjust ();
187 emit_indirect_jump (x);
190 /* Handle goto statements and the labels that they can go to. */
192 /* Specify the location in the RTL code of a label LABEL,
193 which is a LABEL_DECL tree node.
195 This is used for the kind of label that the user can jump to with a
196 goto statement, and for alternatives of a switch or case statement.
197 RTL labels generated for loops and conditionals don't go through here;
198 they are generated directly at the RTL level, by other functions below.
200 Note that this has nothing to do with defining label *names*.
201 Languages vary in how they do that and what that even means. */
203 void
204 expand_label (tree label)
206 rtx label_r = label_rtx (label);
208 do_pending_stack_adjust ();
209 emit_label (label_r);
210 if (DECL_NAME (label))
211 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
/* A nonlocal label needs receiver code to restore the frame context,
   and is recorded so nonlocal gotos can find their handlers.  */
213 if (DECL_NONLOCAL (label))
215 expand_nl_goto_receiver ();
216 nonlocal_goto_handler_labels
217 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
218 nonlocal_goto_handler_labels);
221 if (FORCED_LABEL (label))
222 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
/* Record the first label number that must be kept alive.  */
224 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
225 maybe_set_first_label_num (label_r);
228 /* Generate RTL code for a `goto' statement with target label LABEL.
229 LABEL should be a LABEL_DECL tree node that was or will later be
230 defined with `expand_label'. */
232 void
233 expand_goto (tree label)
235 #ifdef ENABLE_CHECKING
236 /* Check for a nonlocal goto to a containing function. Should have
237 gotten translated to __builtin_nonlocal_goto. */
238 tree context = decl_function_context (label);
239 gcc_assert (!context || context == current_function_decl);
240 #endif
/* A local goto is just an unconditional jump to the label's rtx.  */
242 emit_jump (label_rtx (label));
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int count = 0;

  for (; *s != '\0'; s++)
    if (*s == c)
      count++;

  return count;
}
255 /* Generate RTL for an asm statement (explicit assembler code).
256 STRING is a STRING_CST node containing the assembler code text,
257 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
258 insn is volatile; don't optimize it. */
260 static void
261 expand_asm_loc (tree string, int vol, location_t locus)
263 rtx body;
265 if (TREE_CODE (string) == ADDR_EXPR)
266 string = TREE_OPERAND (string, 0);
/* Copy the template into GC-managed storage; the ASM_INPUT rtx also
   carries the source location for later diagnostics.  */
268 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
269 ggc_strdup (TREE_STRING_POINTER (string)),
270 locus);
272 MEM_VOLATILE_P (body) = vol;
274 emit_insn (body);
277 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
278 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
279 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
280 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
281 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
282 constraint allows the use of a register operand. And, *IS_INOUT
283 will be true if the operand is read-write, i.e., if it is used as
284 an input as well as an output. If *CONSTRAINT_P is not in
285 canonical form, it will be made canonical. (Note that `+' will be
286 replaced with `=' as part of this process.)
288 Returns TRUE if all went well; FALSE if an error occurred. */
290 bool
291 parse_output_constraint (const char **constraint_p, int operand_num,
292 int ninputs, int noutputs, bool *allows_mem,
293 bool *allows_reg, bool *is_inout)
295 const char *constraint = *constraint_p;
296 const char *p;
298 /* Assume the constraint doesn't allow the use of either a register
299 or memory. */
300 *allows_mem = false;
301 *allows_reg = false;
303 /* Allow the `=' or `+' to not be at the beginning of the string,
304 since it wasn't explicitly documented that way, and there is a
305 large body of code that puts it last. Swap the character to
306 the front, so as not to uglify any place else. */
307 p = strchr (constraint, '=');
308 if (!p)
309 p = strchr (constraint, '+');
311 /* If the string doesn't contain an `=', issue an error
312 message. */
313 if (!p)
315 error ("output operand constraint lacks %<=%>");
316 return false;
319 /* If the constraint begins with `+', then the operand is both read
320 from and written to. */
321 *is_inout = (*p == '+');
323 /* Canonicalize the output constraint so that it begins with `='. */
324 if (p != constraint || *is_inout)
326 char *buf;
327 size_t c_len = strlen (constraint);
329 if (p != constraint)
330 warning (0, "output constraint %qc for operand %d "
331 "is not at the beginning",
332 *p, operand_num);
334 /* Make a copy of the constraint. */
335 buf = XALLOCAVEC (char, c_len + 1);
336 strcpy (buf, constraint);
337 /* Swap the first character and the `=' or `+'. */
338 buf[p - constraint] = buf[0];
339 /* Make sure the first character is an `='. (Until we do this,
340 it might be a `+'.) */
341 buf[0] = '=';
342 /* Replace the constraint with the canonicalized string. */
343 *constraint_p = ggc_alloc_string (buf, c_len);
344 constraint = *constraint_p;
347 /* Loop through the constraint string. */
/* CONSTRAINT_LEN advances over multi-character constraint names as a
   single unit.  */
348 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
349 switch (*p)
351 case '+':
352 case '=':
353 error ("operand constraint contains incorrectly positioned "
354 "%<+%> or %<=%>");
355 return false;
357 case '%':
358 if (operand_num + 1 == ninputs + noutputs)
360 error ("%<%%%> constraint used with last operand");
361 return false;
363 break;
365 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
366 *allows_mem = true;
367 break;
369 case '?': case '!': case '*': case '&': case '#':
370 case 'E': case 'F': case 'G': case 'H':
371 case 's': case 'i': case 'n':
372 case 'I': case 'J': case 'K': case 'L': case 'M':
373 case 'N': case 'O': case 'P': case ',':
374 break;
376 case '0': case '1': case '2': case '3': case '4':
377 case '5': case '6': case '7': case '8': case '9':
378 case '[':
379 error ("matching constraint not valid in output operand");
380 return false;
382 case '<': case '>':
383 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
384 excepting those that expand_call created. So match memory
385 and hope. */
386 *allows_mem = true;
387 break;
389 case 'g': case 'X':
390 *allows_reg = true;
391 *allows_mem = true;
392 break;
394 case 'p': case 'r':
395 *allows_reg = true;
396 break;
398 default:
/* Machine-specific constraint letters: classify them via the target's
   register-class and extra-constraint macros.  */
399 if (!ISALPHA (*p))
400 break;
401 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
402 *allows_reg = true;
403 #ifdef EXTRA_CONSTRAINT_STR
404 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
405 *allows_reg = true;
406 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
407 *allows_mem = true;
408 else
410 /* Otherwise we can't assume anything about the nature of
411 the constraint except that it isn't purely registers.
412 Treat it like "g" and hope for the best. */
413 *allows_reg = true;
414 *allows_mem = true;
416 #endif
417 break;
420 return true;
423 /* Similar, but for input constraints. */
425 bool
426 parse_input_constraint (const char **constraint_p, int input_num,
427 int ninputs, int noutputs, int ninout,
428 const char * const * constraints,
429 bool *allows_mem, bool *allows_reg)
431 const char *constraint = *constraint_p;
432 const char *orig_constraint = constraint;
433 size_t c_len = strlen (constraint);
434 size_t j;
435 bool saw_match = false;
437 /* Assume the constraint doesn't allow the use of either
438 a register or memory. */
439 *allows_mem = false;
440 *allows_reg = false;
442 /* Make sure constraint has neither `=', `+', nor '&'. */
444 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
445 switch (constraint[j])
/* These characters are only errors in the user's own constraint;
   they are tolerated in a substituted matching (output) constraint.  */
447 case '+': case '=': case '&':
448 if (constraint == orig_constraint)
450 error ("input operand constraint contains %qc", constraint[j]);
451 return false;
453 break;
455 case '%':
456 if (constraint == orig_constraint
457 && input_num + 1 == ninputs - ninout)
459 error ("%<%%%> constraint used with last operand");
460 return false;
462 break;
464 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
465 *allows_mem = true;
466 break;
468 case '<': case '>':
469 case '?': case '!': case '*': case '#':
470 case 'E': case 'F': case 'G': case 'H':
471 case 's': case 'i': case 'n':
472 case 'I': case 'J': case 'K': case 'L': case 'M':
473 case 'N': case 'O': case 'P': case ',':
474 break;
476 /* Whether or not a numeric constraint allows a register is
477 decided by the matching constraint, and so there is no need
478 to do anything special with them. We must handle them in
479 the default case, so that we don't unnecessarily force
480 operands to memory. */
481 case '0': case '1': case '2': case '3': case '4':
482 case '5': case '6': case '7': case '8': case '9':
484 char *end;
485 unsigned long match;
487 saw_match = true;
489 match = strtoul (constraint + j, &end, 10);
490 if (match >= (unsigned long) noutputs)
492 error ("matching constraint references invalid operand number");
493 return false;
496 /* Try and find the real constraint for this dup. Only do this
497 if the matching constraint is the only alternative. */
498 if (*end == '\0'
499 && (j == 0 || (j == 1 && constraint[0] == '%')))
501 constraint = constraints[match];
502 *constraint_p = constraint;
503 c_len = strlen (constraint);
504 j = 0;
505 /* ??? At the end of the loop, we will skip the first part of
506 the matched constraint. This assumes not only that the
507 other constraint is an output constraint, but also that
508 the '=' or '+' come first. */
509 break;
511 else
512 j = end - constraint;
513 /* Anticipate increment at end of loop. */
514 j--;
516 /* Fall through. */
518 case 'p': case 'r':
519 *allows_reg = true;
520 break;
522 case 'g': case 'X':
523 *allows_reg = true;
524 *allows_mem = true;
525 break;
527 default:
528 if (! ISALPHA (constraint[j]))
530 error ("invalid punctuation %qc in constraint", constraint[j]);
531 return false;
533 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
534 != NO_REGS)
535 *allows_reg = true;
536 #ifdef EXTRA_CONSTRAINT_STR
537 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
538 *allows_reg = true;
539 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
540 *allows_mem = true;
541 else
543 /* Otherwise we can't assume anything about the nature of
544 the constraint except that it isn't purely registers.
545 Treat it like "g" and hope for the best. */
546 *allows_reg = true;
547 *allows_mem = true;
549 #endif
550 break;
553 if (saw_match && !*allows_reg)
554 warning (0, "matching constraint does not allow a register");
556 return true;
559 /* Return DECL iff there's an overlap between *REGS and DECL, where DECL
560 can be an asm-declared register. Called via walk_tree. */
562 static tree
563 decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED,
564 void *data)
566 tree decl = *declp;
567 const HARD_REG_SET *const regs = (const HARD_REG_SET *) data;
569 if (TREE_CODE (decl) == VAR_DECL)
571 if (DECL_HARD_REGISTER (decl)
572 && REG_P (DECL_RTL (decl))
573 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
575 rtx reg = DECL_RTL (decl);
577 if (overlaps_hard_reg_set_p (*regs, GET_MODE (reg), REGNO (reg)))
578 return decl;
580 walk_subtrees = 0;
582 else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
583 walk_subtrees = 0;
584 return NULL_TREE;
587 /* If there is an overlap between *REGS and DECL, return the first overlap
588 found. */
589 tree
590 tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
/* walk_tree stops at the first callback returning non-NULL, so this
   yields the first overlapping asm-declared register variable,
   or NULL_TREE if none.  */
592 return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
595 /* Check for overlap between registers marked in CLOBBERED_REGS and
596 anything inappropriate in T. Emit error and return true on conflict,
597 false for ok. */
599 static bool
600 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
602 /* Conflicts between asm-declared register variables and the clobber
603 list are not allowed. */
604 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
606 if (overlap)
608 error ("asm-specifier for variable %qE conflicts with asm clobber list",
609 DECL_NAME (overlap));
611 /* Reset registerness to stop multiple errors emitted for a single
612 variable. */
613 DECL_REGISTER (overlap) = 0;
614 return true;
617 return false;
620 /* Generate RTL for an asm statement with arguments.
621 STRING is the instruction template.
622 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
623 Each output or input has an expression in the TREE_VALUE and
624 a tree list in TREE_PURPOSE which in turn contains a constraint
625 name in TREE_VALUE (or NULL_TREE) and a constraint string
626 in TREE_PURPOSE.
627 CLOBBERS is a list of STRING_CST nodes each naming a hard register
628 that is clobbered by this insn.
630 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
631 Some elements of OUTPUTS may be replaced with trees representing temporary
632 values. The caller should copy those temporary values to the originally
633 specified lvalues.
635 VOL nonzero means the insn is volatile; don't optimize it. */
637 static void
638 expand_asm_operands (tree string, tree outputs, tree inputs,
639 tree clobbers, tree labels, int vol, location_t locus)
641 rtvec argvec, constraintvec, labelvec;
642 rtx body;
643 int ninputs = list_length (inputs);
644 int noutputs = list_length (outputs);
645 int nlabels = list_length (labels);
646 int ninout;
647 int nclobbers;
648 HARD_REG_SET clobbered_regs;
649 int clobber_conflict_found = 0;
650 tree tail;
651 tree t;
652 int i;
653 /* Vector of RTX's of evaluated output operands. */
654 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
655 int *inout_opnum = XALLOCAVEC (int, noutputs);
656 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
657 enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
658 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
659 int old_generating_concat_p = generating_concat_p;
661 /* An ASM with no outputs needs to be treated as volatile, for now. */
662 if (noutputs == 0)
663 vol = 1;
665 if (! check_operand_nalternatives (outputs, inputs))
666 return;
668 string = resolve_asm_operand_names (string, outputs, inputs, labels);
670 /* Collect constraints. */
671 i = 0;
672 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
673 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
674 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
675 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
677 /* Sometimes we wish to automatically clobber registers across an asm.
678 Case in point is when the i386 backend moved from cc0 to a hard reg --
679 maintaining source-level compatibility means automatically clobbering
680 the flags register. */
681 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
683 /* Count the number of meaningful clobbered registers, ignoring what
684 we would ignore later. */
685 nclobbers = 0;
686 CLEAR_HARD_REG_SET (clobbered_regs);
687 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
689 const char *regname;
691 if (TREE_VALUE (tail) == error_mark_node)
692 return;
693 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
/* decode_reg_name: >= 0 is a hard regno; -2 unknown name; -3 "cc";
   -4 "memory".  "cc" and "memory" are handled specially below.  */
695 i = decode_reg_name (regname);
696 if (i >= 0 || i == -4)
697 ++nclobbers;
698 else if (i == -2)
699 error ("unknown register name %qs in %<asm%>", regname);
701 /* Mark clobbered registers. */
702 if (i >= 0)
704 /* Clobbering the PIC register is an error. */
705 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
707 error ("PIC register %qs clobbered in %<asm%>", regname);
708 return;
711 SET_HARD_REG_BIT (clobbered_regs, i);
715 /* First pass over inputs and outputs checks validity and sets
716 mark_addressable if needed. */
718 ninout = 0;
719 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
721 tree val = TREE_VALUE (tail);
722 tree type = TREE_TYPE (val);
723 const char *constraint;
724 bool is_inout;
725 bool allows_reg;
726 bool allows_mem;
728 /* If there's an erroneous arg, emit no insn. */
729 if (type == error_mark_node)
730 return;
732 /* Try to parse the output constraint. If that fails, there's
733 no point in going further. */
734 constraint = constraints[i];
735 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
736 &allows_mem, &allows_reg, &is_inout))
737 return;
739 if (! allows_reg
740 && (allows_mem
741 || is_inout
742 || (DECL_P (val)
743 && REG_P (DECL_RTL (val))
744 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
745 mark_addressable (val);
747 if (is_inout)
748 ninout++;
/* Each in-out operand counts as an extra (matching) input too.  */
751 ninputs += ninout;
752 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
754 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
755 return;
758 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
760 bool allows_reg, allows_mem;
761 const char *constraint;
763 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
764 would get VOIDmode and that could cause a crash in reload. */
765 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
766 return;
768 constraint = constraints[i + noutputs];
769 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
770 constraints, &allows_mem, &allows_reg))
771 return;
773 if (! allows_reg && allows_mem)
774 mark_addressable (TREE_VALUE (tail));
777 /* Second pass evaluates arguments. */
779 ninout = 0;
780 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
782 tree val = TREE_VALUE (tail);
783 tree type = TREE_TYPE (val);
784 bool is_inout;
785 bool allows_reg;
786 bool allows_mem;
787 rtx op;
788 bool ok;
790 ok = parse_output_constraint (&constraints[i], i, ninputs,
791 noutputs, &allows_mem, &allows_reg,
792 &is_inout);
793 gcc_assert (ok);
795 /* If an output operand is not a decl or indirect ref and our constraint
796 allows a register, make a temporary to act as an intermediate.
797 Make the asm insn write into that, then our caller will copy it to
798 the real output operand. Likewise for promoted variables. */
800 generating_concat_p = 0;
802 real_output_rtx[i] = NULL_RTX;
803 if ((TREE_CODE (val) == INDIRECT_REF
804 && allows_mem)
805 || (DECL_P (val)
806 && (allows_mem || REG_P (DECL_RTL (val)))
807 && ! (REG_P (DECL_RTL (val))
808 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
809 || ! allows_reg
810 || is_inout)
812 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
813 if (MEM_P (op))
814 op = validize_mem (op);
816 if (! allows_reg && !MEM_P (op))
817 error ("output number %d not directly addressable", i);
818 if ((! allows_mem && MEM_P (op))
819 || GET_CODE (op) == CONCAT)
821 real_output_rtx[i] = op;
822 op = gen_reg_rtx (GET_MODE (op));
823 if (is_inout)
824 emit_move_insn (op, real_output_rtx[i]);
827 else
829 op = assign_temp (type, 0, 0, 1);
830 op = validize_mem (op);
831 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
832 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
833 TREE_VALUE (tail) = make_tree (type, op);
835 output_rtx[i] = op;
837 generating_concat_p = old_generating_concat_p;
839 if (is_inout)
841 inout_mode[ninout] = TYPE_MODE (type);
842 inout_opnum[ninout++] = i;
845 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
846 clobber_conflict_found = 1;
849 /* Make vectors for the expression-rtx, constraint strings,
850 and named operands. */
852 argvec = rtvec_alloc (ninputs);
853 constraintvec = rtvec_alloc (ninputs);
854 labelvec = rtvec_alloc (nlabels);
/* Build the ASM_OPERANDS skeleton; it gets the first output's mode
   (VOIDmode if there are no outputs) and the template string.  */
856 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
857 : GET_MODE (output_rtx[0])),
858 ggc_strdup (TREE_STRING_POINTER (string)),
859 empty_string, 0, argvec, constraintvec,
860 labelvec, locus);
862 MEM_VOLATILE_P (body) = vol;
864 /* Eval the inputs and put them into ARGVEC.
865 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
867 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
869 bool allows_reg, allows_mem;
870 const char *constraint;
871 tree val, type;
872 rtx op;
873 bool ok;
875 constraint = constraints[i + noutputs];
876 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
877 constraints, &allows_mem, &allows_reg);
878 gcc_assert (ok);
880 generating_concat_p = 0;
882 val = TREE_VALUE (tail);
883 type = TREE_TYPE (val);
884 /* EXPAND_INITIALIZER will not generate code for valid initializer
885 constants, but will still generate code for other types of operand.
886 This is the behavior we want for constant constraints. */
887 op = expand_expr (val, NULL_RTX, VOIDmode,
888 allows_reg ? EXPAND_NORMAL
889 : allows_mem ? EXPAND_MEMORY
890 : EXPAND_INITIALIZER);
892 /* Never pass a CONCAT to an ASM. */
893 if (GET_CODE (op) == CONCAT)
894 op = force_reg (GET_MODE (op), op);
895 else if (MEM_P (op))
896 op = validize_mem (op);
898 if (asm_operand_ok (op, constraint, NULL) <= 0)
900 if (allows_reg && TYPE_MODE (type) != BLKmode)
901 op = force_reg (TYPE_MODE (type), op);
902 else if (!allows_mem)
903 warning (0, "asm operand %d probably doesn%'t match constraints",
904 i + noutputs);
905 else if (MEM_P (op))
907 /* We won't recognize either volatile memory or memory
908 with a queued address as available a memory_operand
909 at this point. Ignore it: clearly this *is* a memory. */
911 else
913 warning (0, "use of memory input without lvalue in "
914 "asm operand %d is deprecated", i + noutputs);
916 if (CONSTANT_P (op))
918 rtx mem = force_const_mem (TYPE_MODE (type), op);
919 if (mem)
920 op = validize_mem (mem);
921 else
922 op = force_reg (TYPE_MODE (type), op);
924 if (REG_P (op)
925 || GET_CODE (op) == SUBREG
926 || GET_CODE (op) == CONCAT)
928 tree qual_type = build_qualified_type (type,
929 (TYPE_QUALS (type)
930 | TYPE_QUAL_CONST));
931 rtx memloc = assign_temp (qual_type, 1, 1, 1);
932 memloc = validize_mem (memloc);
933 emit_move_insn (memloc, op);
934 op = memloc;
939 generating_concat_p = old_generating_concat_p;
940 ASM_OPERANDS_INPUT (body, i) = op;
942 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
943 = gen_rtx_ASM_INPUT (TYPE_MODE (type),
944 ggc_strdup (constraints[i + noutputs]));
946 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
947 clobber_conflict_found = 1;
950 /* Protect all the operands from the queue now that they have all been
951 evaluated. */
953 generating_concat_p = 0;
955 /* For in-out operands, copy output rtx to input rtx. */
/* The matching inputs occupy the last NINOUT input slots; their
   constraint is just the decimal output-operand number.  */
956 for (i = 0; i < ninout; i++)
958 int j = inout_opnum[i];
959 char buffer[16];
961 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
962 = output_rtx[j];
964 sprintf (buffer, "%d", j);
965 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
966 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
969 /* Copy labels to the vector. */
970 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
971 ASM_OPERANDS_LABEL (body, i)
972 = gen_rtx_LABEL_REF (Pmode, label_rtx (TREE_VALUE (tail)));
974 generating_concat_p = old_generating_concat_p;
976 /* Now, for each output, construct an rtx
977 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
978 ARGVEC CONSTRAINTS OPNAMES))
979 If there is more than one, put them inside a PARALLEL. */
981 if (nlabels > 0 && nclobbers == 0)
983 gcc_assert (noutputs == 0);
984 emit_jump_insn (body);
986 else if (noutputs == 0 && nclobbers == 0)
988 /* No output operands: put in a raw ASM_OPERANDS rtx. */
989 emit_insn (body);
991 else if (noutputs == 1 && nclobbers == 0)
993 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
994 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
996 else
998 rtx obody = body;
999 int num = noutputs;
1001 if (num == 0)
1002 num = 1;
1004 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1006 /* For each output operand, store a SET. */
1007 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1009 XVECEXP (body, 0, i)
1010 = gen_rtx_SET (VOIDmode,
1011 output_rtx[i],
1012 gen_rtx_ASM_OPERANDS
1013 (GET_MODE (output_rtx[i]),
1014 ggc_strdup (TREE_STRING_POINTER (string)),
1015 ggc_strdup (constraints[i]),
1016 i, argvec, constraintvec, labelvec, locus));
1018 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1021 /* If there are no outputs (but there are some clobbers)
1022 store the bare ASM_OPERANDS into the PARALLEL. */
1024 if (i == 0)
1025 XVECEXP (body, 0, i++) = obody;
1027 /* Store (clobber REG) for each clobbered register specified. */
1029 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1031 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1032 int j = decode_reg_name (regname);
1033 rtx clobbered_reg;
1035 if (j < 0)
1037 if (j == -3) /* `cc', which is not a register */
1038 continue;
1040 if (j == -4) /* `memory', don't cache memory across asm */
1042 XVECEXP (body, 0, i++)
1043 = gen_rtx_CLOBBER (VOIDmode,
1044 gen_rtx_MEM
1045 (BLKmode,
1046 gen_rtx_SCRATCH (VOIDmode)));
1047 continue;
1050 /* Ignore unknown register, error already signaled. */
1051 continue;
1054 /* Use QImode since that's guaranteed to clobber just one reg. */
1055 clobbered_reg = gen_rtx_REG (QImode, j);
1057 /* Do sanity check for overlap between clobbers and respectively
1058 input and outputs that hasn't been handled. Such overlap
1059 should have been detected and reported above. */
1060 if (!clobber_conflict_found)
1062 int opno;
1064 /* We test the old body (obody) contents to avoid tripping
1065 over the under-construction body. */
1066 for (opno = 0; opno < noutputs; opno++)
1067 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1068 internal_error ("asm clobber conflict with output operand");
1070 for (opno = 0; opno < ninputs - ninout; opno++)
1071 if (reg_overlap_mentioned_p (clobbered_reg,
1072 ASM_OPERANDS_INPUT (obody, opno)))
1073 internal_error ("asm clobber conflict with input operand");
1076 XVECEXP (body, 0, i++)
1077 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1080 if (nlabels > 0)
1081 emit_jump_insn (body);
1082 else
1083 emit_insn (body);
1086 /* For any outputs that needed reloading into registers, spill them
1087 back to where they belong. */
1088 for (i = 0; i < noutputs; ++i)
1089 if (real_output_rtx[i])
1090 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1092 crtl->has_asm_statement = 1;
1093 free_temp_slots ();
/* Expand the gimple asm statement STMT: convert its operand vectors
   into the TREE_LIST form expected by expand_asm_operands, expand it,
   and copy any reloaded intermediate outputs back into the user's
   declared output destinations.  */

void
expand_asm_stmt (gimple stmt)
{
  int noutputs;
  tree outputs, tail, t;
  tree *o;
  size_t i, n;
  const char *s;
  tree str, out, in, cl, labels;
  location_t locus = gimple_location (stmt);

  /* Meh... convert the gimple asm operands into real tree lists.
     Eventually we should make all routines work on the vectors instead
     of relying on TREE_CHAIN.  */
  out = NULL_TREE;
  n = gimple_asm_noutputs (stmt);
  if (n > 0)
    {
      t = out = gimple_asm_output_op (stmt, 0);
      for (i = 1; i < n; i++)
	t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
    }

  in = NULL_TREE;
  n = gimple_asm_ninputs (stmt);
  if (n > 0)
    {
      t = in = gimple_asm_input_op (stmt, 0);
      for (i = 1; i < n; i++)
	t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
    }

  cl = NULL_TREE;
  n = gimple_asm_nclobbers (stmt);
  if (n > 0)
    {
      t = cl = gimple_asm_clobber_op (stmt, 0);
      for (i = 1; i < n; i++)
	t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
    }

  labels = NULL_TREE;
  n = gimple_asm_nlabels (stmt);
  if (n > 0)
    {
      t = labels = gimple_asm_label_op (stmt, 0);
      for (i = 1; i < n; i++)
	t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
    }

  s = gimple_asm_string (stmt);
  str = build_string (strlen (s), s);

  /* An operand-less ("simple") asm takes the lighter-weight path.  */
  if (gimple_asm_input_p (stmt))
    {
      expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
      return;
    }

  outputs = out;
  noutputs = gimple_asm_noutputs (stmt);
  /* o[I] is the place that output number I should be written.  */
  o = (tree *) alloca (noutputs * sizeof (tree));

  /* Record the contents of OUTPUTS before it is modified.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    o[i] = TREE_VALUE (tail);

  /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
     OUTPUTS some trees for where the values were actually stored.  */
  expand_asm_operands (str, outputs, in, cl, labels,
		       gimple_asm_volatile_p (stmt), locus);

  /* Copy all the intermediate outputs into the specified outputs.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      if (o[i] != TREE_VALUE (tail))
	{
	  expand_assignment (o[i], TREE_VALUE (tail), false);
	  free_temp_slots ();

	  /* Restore the original value so that it's correct the next
	     time we expand this function.  */
	  TREE_VALUE (tail) = o[i];
	}
    }
}
/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  Also returns
   true when there are no operands at all.  */

static bool
check_operand_nalternatives (tree outputs, tree inputs)
{
  if (outputs || inputs)
    {
      /* Count alternatives from the first operand's constraint string:
	 N commas means N+1 alternatives.  */
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives
	= n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in %<asm%>");
	  return false;
	}

      /* Walk all outputs, then chain on to the inputs via NEXT.  */
      tmp = outputs;
      while (tmp)
	{
	  const char *constraint
	    = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));

	  if (n_occurrences (',', constraint) != nalternatives)
	    {
	      error ("operand constraints for %<asm%> differ "
		     "in number of alternatives");
	      return false;
	    }

	  if (TREE_CHAIN (tmp))
	    tmp = TREE_CHAIN (tmp);
	  else
	    tmp = next, next = 0;
	}
    }

  return true;
}
/* A subroutine of expand_asm_operands.  Check that all operand names
   are unique.  Return true if so.  We rely on the fact that these names
   are identifiers, and so have been canonicalized by get_identifier,
   so all we need are pointer comparisons.  */

static bool
check_unique_operand_names (tree outputs, tree inputs, tree labels)
{
  tree i, j;

  /* Each output name must be unique among the outputs that follow it.  */
  for (i = outputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  /* Each input name must be unique among later inputs and all outputs.  */
  for (i = inputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
      for (j = outputs; j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  /* Label names: note a label's name is TREE_PURPOSE (i) directly,
     one level shallower than for outputs/inputs.  */
  for (i = labels; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (i);
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (j)))
	  goto failure;
      for (j = inputs; j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  return true;

 failure:
  /* NOTE(review): if the duplicate is found in the labels loop, I is a
     label and TREE_PURPOSE (TREE_PURPOSE (i)) looks like one
     dereference too many (labels store the name at TREE_PURPOSE (i)) —
     verify against how labels are built in expand_asm_stmt.  */
  error ("duplicate asm operand name %qs",
	 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
  return false;
}
/* A subroutine of expand_asm_operands.  Resolve the names of the operands
   in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
   STRING and in the constraints to those numbers.  Returns the (possibly
   rebuilt) template string.  */

tree
resolve_asm_operand_names (tree string, tree outputs, tree inputs, tree labels)
{
  char *buffer;
  char *p;
  const char *c;
  tree t;

  check_unique_operand_names (outputs, inputs, labels);

  /* Substitute [<name>] in input constraint strings.  There should be no
     named operands in output constraints.  */
  for (t = inputs; t ; t = TREE_CHAIN (t))
    {
      c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
      if (strchr (c, '[') != NULL)
	{
	  /* Work on a writable copy; each substitution shrinks it
	     in place.  */
	  p = buffer = xstrdup (c);
	  while ((p = strchr (p, '[')) != NULL)
	    p = resolve_operand_name_1 (p, outputs, inputs, NULL);
	  TREE_VALUE (TREE_PURPOSE (t))
	    = build_string (strlen (buffer), buffer);
	  free (buffer);
	}
    }

  /* Now check for any needed substitutions in the template.  Look for
     "%[" or "%X[" (X alphabetic); "%%" escapes are skipped.  */
  c = TREE_STRING_POINTER (string);
  while ((c = strchr (c, '%')) != NULL)
    {
      if (c[1] == '[')
	break;
      else if (ISALPHA (c[1]) && c[2] == '[')
	break;
      else
	{
	  c += 1 + (c[1] == '%');
	  continue;
	}
    }

  if (c)
    {
      /* OK, we need to make a copy so we can perform the substitutions.
	 Assume that we will not need extra space--we get to remove '['
	 and ']', which means we cannot have a problem until we have more
	 than 999 operands.  */
      buffer = xstrdup (TREE_STRING_POINTER (string));
      p = buffer + (c - TREE_STRING_POINTER (string));

      while ((p = strchr (p, '%')) != NULL)
	{
	  if (p[1] == '[')
	    p += 1;
	  else if (ISALPHA (p[1]) && p[2] == '[')
	    p += 2;
	  else
	    {
	      p += 1 + (p[1] == '%');
	      continue;
	    }

	  p = resolve_operand_name_1 (p, outputs, inputs, labels);
	}

      string = build_string (strlen (buffer), buffer);
      free (buffer);
    }

  return string;
}
/* A subroutine of resolve_operand_names.  P points to the '[' for a
   potential named operand of the form [<name>].  In place, replace
   the name and brackets with a number.  Return a pointer to the
   balance of the string after substitution.  Operands are numbered
   in order: outputs, then inputs, then labels.  */

static char *
resolve_operand_name_1 (char *p, tree outputs, tree inputs, tree labels)
{
  char *q;
  int op;
  tree t;

  /* Collect the operand name: NUL-terminate it in place at the ']'.  */
  q = strchr (++p, ']');
  if (!q)
    {
      error ("missing close brace for named operand");
      return strchr (p, '\0');
    }
  *q = '\0';

  /* Resolve the name to a number.  OP accumulates across all three
     lists so it ends up as the global operand index.  */
  for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
	goto found;
    }
  for (t = inputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
	goto found;
    }
  for (t = labels; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (t);
      if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
	goto found;
    }

  error ("undefined named operand %qs", identifier_to_locale (p));
  op = 0;

 found:
  /* Replace the name with the number.  Unfortunately, not all libraries
     get the return value of sprintf correct, so search for the end of the
     generated string by hand.  */
  sprintf (--p, "%d", op);
  p = strchr (p, '\0');

  /* Verify the no extra buffer space assumption.  */
  gcc_assert (p <= q);

  /* Shift the rest of the buffer down to fill the gap.  */
  memmove (p, q + 1, strlen (q + 1) + 1);

  return p;
}
/* Generate RTL to evaluate the expression EXP (for its side effects;
   any value is discarded).  */

void
expand_expr_stmt (tree exp)
{
  rtx value;
  tree type;

  value = expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
  type = TREE_TYPE (exp);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (type) == VOIDmode)
	;  /* Nothing to touch for a void-mode value.  */
      else if (TYPE_MODE (type) != BLKmode)
	value = copy_to_reg (value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_and_jump_insns (value, value, EQ,
				   expand_normal (TYPE_SIZE (type)),
				   BLKmode, 0, lab);
	  emit_label (lab);
	}
    }

  /* Free any temporaries used to evaluate this expression.  */
  free_temp_slots ();
}
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  LOCUS is the
   (potential) location of the expression.  */

int
warn_if_unused_value (const_tree exp, location_t locus)
{
 restart:
  if (TREE_USED (exp) || TREE_NO_WARNING (exp))
    return 0;

  /* Don't warn about void constructs.  This includes casting to void,
     void function calls, and statement expressions with a final cast
     to void.  */
  if (VOID_TYPE_P (TREE_TYPE (exp)))
    return 0;

  if (EXPR_HAS_LOCATION (exp))
    locus = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
    case VA_ARG_EXPR:
      /* These all have side effects of their own; their value being
	 unused is not suspicious.  */
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      exp = BIND_EXPR_BODY (exp);
      goto restart;

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      exp = TREE_OPERAND (exp, 0);
      goto restart;

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COND_EXPR:
      /* If this is an expression with side effects, don't warn; this
	 case commonly appears in macro expansions.  */
      if (TREE_SIDE_EFFECTS (exp))
	return 0;
      goto warn;

    case INDIRECT_REF:
      /* Don't warn about automatic dereferencing of references, since
	 the user cannot control it.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
	{
	  exp = TREE_OPERAND (exp, 0);
	  goto restart;
	}
      /* Fall through.  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((DECL_P (exp) || REFERENCE_CLASS_P (exp))
	  && TREE_THIS_VOLATILE (exp))
	return 0;

      /* If this is an expression which has no operands, there is no value
	 to be unused.  There are no such language-independent codes,
	 but front ends may define such.  */
      if (EXPRESSION_CLASS_P (exp) && TREE_OPERAND_LENGTH (exp) == 0)
	return 0;

    warn:
      warning_at (locus, OPT_Wunused_value, "value computed is not used");
      return 1;
    }
}
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}
/* Generate RTL to return directly from the current function.
   (That is, we bypass any return value.)  Jumps to the (lazily
   created) naked_return_label.  */

void
expand_naked_return (void)
{
  rtx end_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Create the shared naked-return label on first use.  */
  end_label = naked_return_label;
  if (end_label == 0)
    end_label = naked_return_label = gen_label_rtx ();

  emit_jump (end_label);
}
/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (rtx val)
{
  /* Copy the value to the return location unless it's already there.  */

  tree decl = DECL_RESULT (current_function_decl);
  rtx return_reg = DECL_RTL (decl);
  if (return_reg != val)
    {
      tree funtype = TREE_TYPE (current_function_decl);
      tree type = TREE_TYPE (decl);
      int unsignedp = TYPE_UNSIGNED (type);
      enum machine_mode old_mode = DECL_MODE (decl);
      enum machine_mode mode;

      /* for_return == 2 means the result is returned by invisible
	 reference; 1 means a normal value return.  */
      if (DECL_BY_REFERENCE (decl))
	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
      else
	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);

      if (mode != old_mode)
	val = convert_modes (mode, old_mode, val, unsignedp);

      if (GET_CODE (return_reg) == PARALLEL)
	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
	emit_move_insn (return_reg, val);
    }

  expand_null_return_1 ();
}
/* Output a return with no value: flush pending stack adjustments and
   jump to the function's common return label.  */

static void
expand_null_return_1 (void)
{
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}
/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

void
expand_return (tree retval)
{
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      /* Still evaluate RETVAL for its side effects.  */
      expand_normal (retval);
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
	 returns a value.  */
      expand_null_return ();
      return;
    }
  else if ((TREE_CODE (retval) == MODIFY_EXPR
	    || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = TREE_OPERAND (retval, 1);
  else
    retval_rhs = retval;

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* If we are returning the RESULT_DECL, then the value has already
     been stored into it, so we don't have to do anything special.  */
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
    expand_value_return (result_rtl);

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  The compiler currently can't handle
     copying a BLKmode value into registers.  We could put this code in a
     more general area (for use by everyone instead of just function
     call/return), but until this feature is generally usable it is kept here
     (and in expand_call).  */

  else if (retval_rhs != 0
	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
	   && REG_P (result_rtl))
    {
      int i;
      unsigned HOST_WIDE_INT bitpos, xbitpos;
      unsigned HOST_WIDE_INT padding_correction = 0;
      unsigned HOST_WIDE_INT bytes
	= int_size_in_bytes (TREE_TYPE (retval_rhs));
      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      unsigned int bitsize
	= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
      rtx *result_pseudos = XALLOCAVEC (rtx, n_regs);
      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
      rtx result_val = expand_normal (retval_rhs);
      enum machine_mode tmpmode, result_reg_mode;

      if (bytes == 0)
	{
	  expand_null_return ();
	  return;
	}

      /* If the structure doesn't take up a whole number of words, see
	 whether the register value should be padded on the left or on
	 the right.  Set PADDING_CORRECTION to the number of padding
	 bits needed on the left side.

	 In most ABIs, the structure will be returned at the least end of
	 the register, which translates to right padding on little-endian
	 targets and left padding on big-endian targets.  The opposite
	 holds if the structure is returned at the most significant
	 end of the register.  */
      if (bytes % UNITS_PER_WORD != 0
	  && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
	      ? !BYTES_BIG_ENDIAN
	      : BYTES_BIG_ENDIAN))
	padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
					       * BITS_PER_UNIT));

      /* Copy the structure BITSIZE bits at a time.  */
      for (bitpos = 0, xbitpos = padding_correction;
	   bitpos < bytes * BITS_PER_UNIT;
	   bitpos += bitsize, xbitpos += bitsize)
	{
	  /* We need a new destination pseudo each time xbitpos is
	     on a word boundary and when xbitpos == padding_correction
	     (the first time through).  */
	  if (xbitpos % BITS_PER_WORD == 0
	      || xbitpos == padding_correction)
	    {
	      /* Generate an appropriate register.  */
	      dst = gen_reg_rtx (word_mode);
	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;

	      /* Clear the destination before we move anything into it.  */
	      emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
	    }

	  /* We need a new source operand each time bitpos is on a word
	     boundary.  */
	  if (bitpos % BITS_PER_WORD == 0)
	    src = operand_subword_force (result_val,
					 bitpos / BITS_PER_WORD,
					 BLKmode);

	  /* Use bitpos for the source extraction (left justified) and
	     xbitpos for the destination store (right justified).  */
	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
			   extract_bit_field (src, bitsize,
					      bitpos % BITS_PER_WORD, 1,
					      NULL_RTX, word_mode, word_mode));
	}

      tmpmode = GET_MODE (result_rtl);
      if (tmpmode == BLKmode)
	{
	  /* Find the smallest integer mode large enough to hold the
	     entire structure and use that mode instead of BLKmode
	     on the USE insn for the return register.  */
	  for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmpmode != VOIDmode;
	       tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	    /* Have we found a large enough mode?  */
	    if (GET_MODE_SIZE (tmpmode) >= bytes)
	      break;

	  /* A suitable mode should have been found.  */
	  gcc_assert (tmpmode != VOIDmode);

	  PUT_MODE (result_rtl, tmpmode);
	}

      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
	result_reg_mode = word_mode;
      else
	result_reg_mode = tmpmode;
      result_reg = gen_reg_rtx (result_reg_mode);

      for (i = 0; i < n_regs; i++)
	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
			result_pseudos[i]);

      if (tmpmode != result_reg_mode)
	result_reg = gen_lowpart (tmpmode, result_reg);

      expand_value_return (result_reg);
    }
  else if (retval_rhs != 0
	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
	   && (REG_P (result_rtl)
	       || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Calculate the return value into a temporary (usually a pseudo
	 reg).  */
      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);

      val = assign_temp (nt, 0, 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
      val = force_not_mem (val);
      /* Return the calculated value.  */
      expand_value_return (val);
    }
  else
    {
      /* No hard reg used; calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
      expand_value_return (result_rtl);
    }
}
/* Emit code to restore vital registers at the beginning of a nonlocal goto
   handler.  */
static void
expand_nl_goto_receiver (void)
{
  rtx chain;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments are to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be
       decrementing fp by STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
      size_t i;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
#endif

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* Generate RTL for the automatic variable declaration DECL.
   (Other kinds of declarations are simply ignored if seen here.)  */

void
expand_decl (tree decl)
{
  tree type;

  type = TREE_TYPE (decl);

  /* For a CONST_DECL, set mode, alignment, and sizes from those of the
     type in case this node is used in a reference.  */
  if (TREE_CODE (decl) == CONST_DECL)
    {
      DECL_MODE (decl) = TYPE_MODE (type);
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
      return;
    }

  /* Otherwise, only automatic variables need any expansion done.  Static and
     external variables, and external functions, will be handled by
     `assemble_variable' (called from finish_decl).  TYPE_DECL requires
     nothing.  PARM_DECLs are handled in `assign_parms'.  */
  if (TREE_CODE (decl) != VAR_DECL)
    return;

  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
    return;

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));

  else if (DECL_SIZE (decl) == 0)
    {
      /* Variable with incomplete type.  */
      rtx x;
      if (DECL_INITIAL (decl) == 0)
	/* Error message was already done; now avoid a crash.  */
	x = gen_rtx_MEM (BLKmode, const0_rtx);
      else
	/* An initializer is going to decide the size of this array.
	   Until we know the size, represent its address with a reg.  */
	x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));

      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);
    }
  else if (use_register_for_decl (decl))
    {
      /* Automatic variable that can go in a register.  */
      enum machine_mode reg_mode = promote_decl_mode (decl, NULL);

      SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));

      /* Note if the object is a user variable.  */
      if (!DECL_ARTIFICIAL (decl))
	mark_user_reg (DECL_RTL (decl));

      if (POINTER_TYPE_P (type))
	mark_reg_pointer (DECL_RTL (decl),
			  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
    }
  else
    {
      rtx oldaddr = 0;
      rtx addr;
      rtx x;

      /* Variable-sized decls are dealt with in the gimplifier.  */
      gcc_assert (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST);

      /* If we previously made RTL for this decl, it must be an array
	 whose size was determined by the initializer.
	 The old address was a register; set that register now
	 to the proper address.  */
      if (DECL_RTL_SET_P (decl))
	{
	  gcc_assert (MEM_P (DECL_RTL (decl)));
	  gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0)));
	  oldaddr = XEXP (DECL_RTL (decl), 0);
	}

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
			   : GET_MODE_BITSIZE (DECL_MODE (decl)));
      DECL_USER_ALIGN (decl) = 0;

      x = assign_temp (decl, 1, 1, 1);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);

      if (oldaddr)
	{
	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
	  if (addr != oldaddr)
	    emit_move_insn (oldaddr, addr);
	}
    }
}
/* Emit code to save the current value of stack.  Returns the rtx
   holding the saved stack pointer.  */

rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  do_pending_stack_adjust ();
  emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
  return ret;
}
1992 /* Emit code to restore the current value of stack. */
1993 void
1994 expand_stack_restore (tree var)
1996 rtx sa = expand_normal (var);
1998 sa = convert_memory_address (Pmode, sa);
1999 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
/* Do the insertion of a case label into case_list.  The labels are
   fed to us in descending order from the sorted vector of case labels used
   in the tree part of the middle end.  So the list we construct is
   sorted in ascending order.  The bounds on the case range, LOW and HIGH,
   are converted to case's index type TYPE.  Returns the new list head
   (the original HEAD if the case is unreachable and dropped).  */

static struct case_node *
add_case_node (struct case_node *head, tree type, tree low, tree high,
	       tree label, alloc_pool case_node_pool)
{
  tree min_value, max_value;
  struct case_node *r;

  gcc_assert (TREE_CODE (low) == INTEGER_CST);
  gcc_assert (!high || TREE_CODE (high) == INTEGER_CST);

  min_value = TYPE_MIN_VALUE (type);
  max_value = TYPE_MAX_VALUE (type);

  /* If there's no HIGH value, then this is not a case range; it's
     just a simple case label.  But that's just a degenerate case
     range.
     If the bounds are equal, turn this into the one-value case.  */
  if (!high || tree_int_cst_equal (low, high))
    {
      /* If the simple case value is unreachable, ignore it.  */
      if ((TREE_CODE (min_value) == INTEGER_CST
	   && tree_int_cst_compare (low, min_value) < 0)
	  || (TREE_CODE (max_value) == INTEGER_CST
	      && tree_int_cst_compare (low, max_value) > 0))
	return head;
      low = fold_convert (type, low);
      high = low;
    }
  else
    {
      /* If the entire case range is unreachable, ignore it.  */
      if ((TREE_CODE (min_value) == INTEGER_CST
	   && tree_int_cst_compare (high, min_value) < 0)
	  || (TREE_CODE (max_value) == INTEGER_CST
	      && tree_int_cst_compare (low, max_value) > 0))
	return head;

      /* If the lower bound is less than the index type's minimum
	 value, truncate the range bounds.  */
      if (TREE_CODE (min_value) == INTEGER_CST
	  && tree_int_cst_compare (low, min_value) < 0)
	low = min_value;
      low = fold_convert (type, low);

      /* If the upper bound is greater than the index type's maximum
	 value, truncate the range bounds.  */
      if (TREE_CODE (max_value) == INTEGER_CST
	  && tree_int_cst_compare (high, max_value) > 0)
	high = max_value;
      high = fold_convert (type, high);
    }


  /* Add this label to the chain.  Make sure to drop overflow flags.  */
  r = (struct case_node *) pool_alloc (case_node_pool);
  r->low = build_int_cst_wide (TREE_TYPE (low), TREE_INT_CST_LOW (low),
			       TREE_INT_CST_HIGH (low));
  r->high = build_int_cst_wide (TREE_TYPE (high), TREE_INT_CST_LOW (high),
				TREE_INT_CST_HIGH (high));
  r->code_label = label;
  r->parent = r->left = NULL;
  r->right = head;
  return r;
}
/* Maximum number of case bit tests.  */
#define MAX_CASE_BIT_TESTS  3

/* By default, enable case bit tests on targets with ashlsi3.  */
#ifndef CASE_USE_BIT_TESTS
#define CASE_USE_BIT_TESTS  (optab_handler (ashl_optab, word_mode) \
			     != CODE_FOR_nothing)
#endif
/* A case_bit_test represents a set of case nodes that may be
   selected from using a bit-wise comparison.  HI and LO hold
   the integer to be tested against, LABEL contains the label
   to jump to upon success and BITS counts the number of case
   nodes handled by this test, typically the number of bits
   set in HI:LO.  */

struct case_bit_test
{
  HOST_WIDE_INT hi;	/* High word of the constant mask.  */
  HOST_WIDE_INT lo;	/* Low word of the constant mask.  */
  rtx label;		/* Target label when a mask bit matches.  */
  int bits;		/* Number of case nodes folded into this test.  */
};
/* Determine whether "1 << x" is relatively cheap in word_mode.  */

static bool
lshift_cheap_p (void)
{
  /* Result is computed once per compilation and cached.  */
  static bool init = false;
  static bool cheap = true;

  if (!init)
    {
      /* Cost a shift of const1 by a (fake, high-numbered) pseudo.  */
      rtx reg = gen_rtx_REG (word_mode, 10000);
      int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET,
			   optimize_insn_for_speed_p ());
      cheap = cost < COSTS_N_INSNS (3);
      init = true;
    }

  return cheap;
}
2118 /* Comparison function for qsort to order bit tests by decreasing
2119 number of case nodes, i.e. the node with the most cases gets
2120 tested first. */
2122 static int
2123 case_bit_test_cmp (const void *p1, const void *p2)
2125 const struct case_bit_test *const d1 = (const struct case_bit_test *) p1;
2126 const struct case_bit_test *const d2 = (const struct case_bit_test *) p2;
2128 if (d2->bits != d1->bits)
2129 return d2->bits - d1->bits;
2131 /* Stabilize the sort. */
2132 return CODE_LABEL_NUMBER (d2->label) - CODE_LABEL_NUMBER (d1->label);
2135 /* Expand a switch statement by a short sequence of bit-wise
2136 comparisons. "switch(x)" is effectively converted into
2137 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
2138 integer constants.
2140 INDEX_EXPR is the value being switched on, which is of
2141 type INDEX_TYPE. MINVAL is the lowest case value of in
2142 the case nodes, of INDEX_TYPE type, and RANGE is highest
2143 value minus MINVAL, also of type INDEX_TYPE. NODES is
2144 the set of case nodes, and DEFAULT_LABEL is the label to
2145 branch to should none of the cases match.
2147 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
2148 node targets. */
2150 static void
2151 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
2152 tree range, case_node_ptr nodes, rtx default_label)
2154 struct case_bit_test test[MAX_CASE_BIT_TESTS];
2155 enum machine_mode mode;
2156 rtx expr, index, label;
2157 unsigned int i,j,lo,hi;
2158 struct case_node *n;
2159 unsigned int count;
2161 count = 0;
2162 for (n = nodes; n; n = n->right)
2164 label = label_rtx (n->code_label);
2165 for (i = 0; i < count; i++)
2166 if (label == test[i].label)
2167 break;
2169 if (i == count)
2171 gcc_assert (count < MAX_CASE_BIT_TESTS);
2172 test[i].hi = 0;
2173 test[i].lo = 0;
2174 test[i].label = label;
2175 test[i].bits = 1;
2176 count++;
2178 else
2179 test[i].bits++;
2181 lo = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2182 n->low, minval), 1);
2183 hi = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2184 n->high, minval), 1);
2185 for (j = lo; j <= hi; j++)
2186 if (j >= HOST_BITS_PER_WIDE_INT)
2187 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
2188 else
2189 test[i].lo |= (HOST_WIDE_INT) 1 << j;
2192 qsort (test, count, sizeof(*test), case_bit_test_cmp);
2194 index_expr = fold_build2 (MINUS_EXPR, index_type,
2195 fold_convert (index_type, index_expr),
2196 fold_convert (index_type, minval));
2197 index = expand_normal (index_expr);
2198 do_pending_stack_adjust ();
2200 mode = TYPE_MODE (index_type);
2201 expr = expand_normal (range);
2202 if (default_label)
2203 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
2204 default_label);
2206 index = convert_to_mode (word_mode, index, 0);
2207 index = expand_binop (word_mode, ashl_optab, const1_rtx,
2208 index, NULL_RTX, 1, OPTAB_WIDEN);
2210 for (i = 0; i < count; i++)
2212 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
2213 expr = expand_binop (word_mode, and_optab, index, expr,
2214 NULL_RTX, 1, OPTAB_WIDEN);
2215 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
2216 word_mode, 1, test[i].label);
2219 if (default_label)
2220 emit_jump (default_label);
2223 #ifndef HAVE_casesi
2224 #define HAVE_casesi 0
2225 #endif
2227 #ifndef HAVE_tablejump
2228 #define HAVE_tablejump 0
2229 #endif
/* Terminate a case (Pascal/Ada) or switch (C) statement by expanding
   the GIMPLE_SWITCH STMT.  The index expression and the case labels
   are read from STMT.  Generate the code to test the index and jump
   to the right place: a bit-test sequence, a binary decision tree,
   or a casesi/tablejump dispatch table, depending on the shape of
   the case values.  */
2237 void
2238 expand_case (gimple stmt)
2240 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
2241 rtx default_label = 0;
2242 struct case_node *n;
2243 unsigned int count, uniq;
2244 rtx index;
2245 rtx table_label;
2246 int ncases;
2247 rtx *labelvec;
2248 int i;
2249 rtx before_case, end, lab;
2251 tree index_expr = gimple_switch_index (stmt);
2252 tree index_type = TREE_TYPE (index_expr);
2253 int unsignedp = TYPE_UNSIGNED (index_type);
2255 /* The insn after which the case dispatch should finally
2256 be emitted. Zero for a dummy. */
2257 rtx start;
2259 /* A list of case labels; it is first built as a list and it may then
2260 be rearranged into a nearly balanced binary tree. */
2261 struct case_node *case_list = 0;
2263 /* Label to jump to if no case matches. */
2264 tree default_label_decl = NULL_TREE;
2266 alloc_pool case_node_pool = create_alloc_pool ("struct case_node pool",
2267 sizeof (struct case_node),
2268 100);
2270 do_pending_stack_adjust ();
2272 /* An ERROR_MARK occurs for various reasons including invalid data type. */
2273 if (index_type != error_mark_node)
2275 tree elt;
2276 bitmap label_bitmap;
2277 int stopi = 0;
2279 /* cleanup_tree_cfg removes all SWITCH_EXPR with their index
2280 expressions being INTEGER_CST. */
2281 gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);
2283 /* The default case, if ever taken, is the first element. */
2284 elt = gimple_switch_label (stmt, 0);
2285 if (!CASE_LOW (elt) && !CASE_HIGH (elt))
2287 default_label_decl = CASE_LABEL (elt);
2288 stopi = 1;
/* Walk labels backwards so the resulting CASE_LIST ends up in
   ascending order of case value.  */
2291 for (i = gimple_switch_num_labels (stmt) - 1; i >= stopi; --i)
2293 tree low, high;
2294 elt = gimple_switch_label (stmt, i);
2296 low = CASE_LOW (elt);
2297 gcc_assert (low);
2298 high = CASE_HIGH (elt);
2300 /* Discard empty ranges. */
2301 if (high && tree_int_cst_lt (high, low))
2302 continue;
2304 case_list = add_case_node (case_list, index_type, low, high,
2305 CASE_LABEL (elt), case_node_pool);
2309 before_case = start = get_last_insn ();
2310 if (default_label_decl)
2311 default_label = label_rtx (default_label_decl);
2313 /* Get upper and lower bounds of case values. */
2315 uniq = 0;
2316 count = 0;
2317 label_bitmap = BITMAP_ALLOC (NULL);
2318 for (n = case_list; n; n = n->right)
2320 /* Count the elements and track the largest and smallest
2321 of them (treating them as signed even if they are not). */
2322 if (count++ == 0)
2324 minval = n->low;
2325 maxval = n->high;
2327 else
2329 if (tree_int_cst_lt (n->low, minval))
2330 minval = n->low;
2331 if (tree_int_cst_lt (maxval, n->high))
2332 maxval = n->high;
2334 /* A range counts double, since it requires two compares. */
2335 if (! tree_int_cst_equal (n->low, n->high))
2336 count++;
2338 /* If we have not seen this label yet, then increase the
2339 number of unique case node targets seen. */
2340 lab = label_rtx (n->code_label);
2341 if (!bitmap_bit_p (label_bitmap, CODE_LABEL_NUMBER (lab)))
2343 bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab));
2344 uniq++;
2348 BITMAP_FREE (label_bitmap);
2350 /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
2351 destination, such as one with a default case only. However,
2352 it doesn't remove cases that are out of range for the switch
2353 type, so we may still get a zero here. */
2354 if (count == 0)
2356 if (default_label)
2357 emit_jump (default_label);
2358 free_alloc_pool (case_node_pool);
2359 return;
2362 /* Compute span of values. */
2363 range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);
2365 /* Try implementing this switch statement by a short sequence of
2366 bit-wise comparisons. However, we let the binary-tree case
2367 below handle constant index expressions. */
2368 if (CASE_USE_BIT_TESTS
2369 && ! TREE_CONSTANT (index_expr)
2370 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
2371 && compare_tree_int (range, 0) > 0
2372 && lshift_cheap_p ()
2373 && ((uniq == 1 && count >= 3)
2374 || (uniq == 2 && count >= 5)
2375 || (uniq == 3 && count >= 6)))
2377 /* Optimize the case where all the case values fit in a
2378 word without having to subtract MINVAL. In this case,
2379 we can optimize away the subtraction. */
2380 if (compare_tree_int (minval, 0) > 0
2381 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
2383 minval = build_int_cst (index_type, 0);
2384 range = maxval;
2386 emit_case_bit_tests (index_type, index_expr, minval, range,
2387 case_list, default_label);
2390 /* If range of values is much bigger than number of values,
2391 make a sequence of conditional branches instead of a dispatch.
2392 If the switch-index is a constant, do it this way
2393 because we can optimize it. */
2395 else if (count < targetm.case_values_threshold ()
2396 || compare_tree_int (range,
2397 (optimize_insn_for_size_p () ? 3 : 10) * count) > 0
2398 /* RANGE may be signed, and really large ranges will show up
2399 as negative numbers. */
2400 || compare_tree_int (range, 0) < 0
2401 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
2402 || flag_pic
2403 #endif
2404 || !flag_jump_tables
2405 || TREE_CONSTANT (index_expr)
2406 /* If neither casesi or tablejump is available, we can
2407 only go this way. */
2408 || (!HAVE_casesi && !HAVE_tablejump))
2410 index = expand_normal (index_expr);
2412 /* If the index is a short or char that we do not have
2413 an insn to handle comparisons directly, convert it to
2414 a full integer now, rather than letting each comparison
2415 generate the conversion. */
2417 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
2418 && ! have_insn_for (COMPARE, GET_MODE (index)))
2420 enum machine_mode wider_mode;
2421 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
2422 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
2423 if (have_insn_for (COMPARE, wider_mode))
2425 index = convert_to_mode (wider_mode, index, unsignedp);
2426 break;
2430 do_pending_stack_adjust ();
2432 if (MEM_P (index))
2433 index = copy_to_reg (index);
2435 /* We generate a binary decision tree to select the
2436 appropriate target code. This is done as follows:
2438 The list of cases is rearranged into a binary tree,
2439 nearly optimal assuming equal probability for each case.
2441 The tree is transformed into RTL, eliminating
2442 redundant test conditions at the same time.
2444 If program flow could reach the end of the
2445 decision tree an unconditional jump to the
2446 default code is emitted. */
2448 use_cost_table = estimate_case_costs (case_list);
2449 balance_case_nodes (&case_list, NULL);
2450 emit_case_nodes (index, case_list, default_label, index_type);
2451 if (default_label)
2452 emit_jump (default_label);
2454 else
/* Dispatch-table path: try a casesi instruction first, then fall
   back to an explicit tablejump.  */
2456 rtx fallback_label = label_rtx (case_list->code_label);
2457 table_label = gen_label_rtx ();
2458 if (! try_casesi (index_type, index_expr, minval, range,
2459 table_label, default_label, fallback_label))
2461 bool ok;
2463 /* Index jumptables from zero for suitable values of
2464 minval to avoid a subtraction. */
2465 if (optimize_insn_for_speed_p ()
2466 && compare_tree_int (minval, 0) > 0
2467 && compare_tree_int (minval, 3) < 0)
2469 minval = build_int_cst (index_type, 0);
2470 range = maxval;
2473 ok = try_tablejump (index_type, index_expr, minval, range,
2474 table_label, default_label);
2475 gcc_assert (ok);
2478 /* Get table of labels to jump to, in order of case index. */
2480 ncases = tree_low_cst (range, 0) + 1;
2481 labelvec = XALLOCAVEC (rtx, ncases);
2482 memset (labelvec, 0, ncases * sizeof (rtx));
2484 for (n = case_list; n; n = n->right)
2486 /* Compute the low and high bounds relative to the minimum
2487 value since that should fit in a HOST_WIDE_INT while the
2488 actual values may not. */
2489 HOST_WIDE_INT i_low
2490 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2491 n->low, minval), 1);
2492 HOST_WIDE_INT i_high
2493 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2494 n->high, minval), 1);
2495 HOST_WIDE_INT i;
2497 for (i = i_low; i <= i_high; i ++)
2498 labelvec[i]
2499 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
2502 /* Fill in the gaps with the default. We may have gaps at
2503 the beginning if we tried to avoid the minval subtraction,
2504 so substitute some label even if the default label was
2505 deemed unreachable. */
2506 if (!default_label)
2507 default_label = fallback_label;
2508 for (i = 0; i < ncases; i++)
2509 if (labelvec[i] == 0)
2510 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
2512 /* Output the table. */
2513 emit_label (table_label);
2515 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
2516 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
2517 gen_rtx_LABEL_REF (Pmode, table_label),
2518 gen_rtvec_v (ncases, labelvec),
2519 const0_rtx, const0_rtx));
2520 else
2521 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
2522 gen_rtvec_v (ncases, labelvec)));
2524 /* Record no drop-through after the table. */
2525 emit_barrier ();
/* Move the freshly-emitted dispatch code back to START, so it sits
   before any code emitted while expanding the index expression.  */
2528 before_case = NEXT_INSN (before_case);
2529 end = get_last_insn ();
2530 reorder_insns (before_case, end, start);
2533 free_temp_slots ();
2534 free_alloc_pool (case_node_pool);
2537 /* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE. */
2539 static void
2540 do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
2541 int unsignedp)
2543 do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
2544 NULL_RTX, NULL_RTX, label, -1);
2547 /* Not all case values are encountered equally. This function
2548 uses a heuristic to weight case labels, in cases where that
2549 looks like a reasonable thing to do.
2551 Right now, all we try to guess is text, and we establish the
2552 following weights:
2554 chars above space: 16
2555 digits: 16
2556 default: 12
2557 space, punct: 8
2558 tab: 4
2559 newline: 2
2560 other "\" chars: 1
2561 remaining chars: 0
2563 If we find any cases in the switch that are not either -1 or in the range
2564 of valid ASCII characters, or are control characters other than those
2565 commonly used with "\", don't treat this switch scanning text.
2567 Return 1 if these nodes are suitable for cost estimation, otherwise
2568 return 0. */
2570 static int
2571 estimate_case_costs (case_node_ptr node)
2573 tree min_ascii = integer_minus_one_node;
2574 tree max_ascii = build_int_cst (TREE_TYPE (node->high), 127);
2575 case_node_ptr n;
2576 int i;
2578 /* If we haven't already made the cost table, make it now. Note that the
2579 lower bound of the table is -1, not zero. */
2581 if (! cost_table_initialized)
2583 cost_table_initialized = 1;
2585 for (i = 0; i < 128; i++)
2587 if (ISALNUM (i))
2588 COST_TABLE (i) = 16;
2589 else if (ISPUNCT (i))
2590 COST_TABLE (i) = 8;
2591 else if (ISCNTRL (i))
/* -1 marks control characters as "not text"; any case value that
   maps here makes the whole switch unsuitable (see loop below).  */
2592 COST_TABLE (i) = -1;
/* Common escape characters get explicit weights, overriding the
   ISPUNCT/ISCNTRL classification above.  */
2595 COST_TABLE (' ') = 8;
2596 COST_TABLE ('\t') = 4;
2597 COST_TABLE ('\0') = 4;
2598 COST_TABLE ('\n') = 2;
2599 COST_TABLE ('\f') = 1;
2600 COST_TABLE ('\v') = 1;
2601 COST_TABLE ('\b') = 1;
2604 /* See if all the case expressions look like text. It is text if the
2605 constant is >= -1 and the highest constant is <= 127. Do all comparisons
2606 as signed arithmetic since we don't want to ever access cost_table with a
2607 value less than -1. Also check that none of the constants in a range
2608 are strange control characters. */
2610 for (n = node; n; n = n->right)
2612 if (tree_int_cst_lt (n->low, min_ascii)
2613 || tree_int_cst_lt (max_ascii, n->high))
2614 return 0;
2616 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
2617 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
2618 if (COST_TABLE (i) < 0)
2619 return 0;
2622 /* All interesting values are within the range of interesting
2623 ASCII characters. */
2624 return 1;
2627 /* Take an ordered list of case nodes
2628 and transform them into a near optimal binary tree,
2629 on the assumption that any target code selection value is as
2630 likely as any other.
2632 The transformation is performed by splitting the ordered
2633 list into two equal sections plus a pivot. The parts are
2634 then attached to the pivot as left and right branches. Each
2635 branch is then transformed recursively. */
2637 static void
2638 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
2640 case_node_ptr np;
2642 np = *head;
2643 if (np)
2645 int cost = 0;
2646 int i = 0;
2647 int ranges = 0;
2648 case_node_ptr *npp;
2649 case_node_ptr left;
2651 /* Count the number of entries on branch. Also count the ranges. */
2653 while (np)
2655 if (!tree_int_cst_equal (np->low, np->high))
2657 ranges++;
2658 if (use_cost_table)
2659 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
2662 if (use_cost_table)
2663 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
2665 i++;
2666 np = np->right;
2669 if (i > 2)
2671 /* Split this list if it is long enough for that to help. */
2672 npp = head;
2673 left = *npp;
2674 if (use_cost_table)
2676 /* Find the place in the list that bisects the list's total cost,
2677 Here I gets half the total cost. */
2678 int n_moved = 0;
2679 i = (cost + 1) / 2;
2680 while (1)
2682 /* Skip nodes while their cost does not reach that amount. */
2683 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2684 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
2685 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
2686 if (i <= 0)
2687 break;
2688 npp = &(*npp)->right;
2689 n_moved += 1;
/* If the very first node already crosses half the total cost, a
   split would produce an empty left part — keep the list as-is
   instead.  */
2691 if (n_moved == 0)
2693 /* Leave this branch lopsided, but optimize left-hand
2694 side and fill in `parent' fields for right-hand side. */
2695 np = *head;
2696 np->parent = parent;
2697 balance_case_nodes (&np->left, np);
2698 for (; np->right; np = np->right)
2699 np->right->parent = np;
2700 return;
2703 /* If there are just three nodes, split at the middle one. */
2704 else if (i == 3)
2705 npp = &(*npp)->right;
2706 else
2708 /* Find the place in the list that bisects the list's total cost,
2709 where ranges count as 2.
2710 Here I gets half the total cost. */
2711 i = (i + ranges + 1) / 2;
2712 while (1)
2714 /* Skip nodes while their cost does not reach that amount. */
2715 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2716 i--;
2717 i--;
2718 if (i <= 0)
2719 break;
2720 npp = &(*npp)->right;
/* NPP now points at the pivot: detach everything from the pivot on,
   make the pivot the new head, and hang the prefix off its left.  */
2723 *head = np = *npp;
2724 *npp = 0;
2725 np->parent = parent;
2726 np->left = left;
2728 /* Optimize each of the two split parts. */
2729 balance_case_nodes (&np->left, np);
2730 balance_case_nodes (&np->right, np);
2732 else
2734 /* Else leave this branch as one level,
2735 but fill in `parent' fields. */
2736 np = *head;
2737 np->parent = parent;
2738 for (; np->right; np = np->right)
2739 np->right->parent = np;
2744 /* Search the parent sections of the case node tree
2745 to see if a test for the lower bound of NODE would be redundant.
2746 INDEX_TYPE is the type of the index expression.
2748 The instructions to generate the case decision tree are
2749 output in the same order as nodes are processed so it is
2750 known that if a parent node checks the range of the current
2751 node minus one that the current node is bounded at its lower
2752 span. Thus the test would be redundant. */
2754 static int
2755 node_has_low_bound (case_node_ptr node, tree index_type)
2757 tree low_minus_one;
2758 case_node_ptr pnode;
2760 /* If the lower bound of this node is the lowest value in the index type,
2761 we need not test it. */
2763 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
2764 return 1;
2766 /* If this node has a left branch, the value at the left must be less
2767 than that at this node, so it cannot be bounded at the bottom and
2768 we need not bother testing any further. */
2770 if (node->left)
2771 return 0;
2773 low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
2774 node->low,
2775 build_int_cst (TREE_TYPE (node->low), 1));
2777 /* If the subtraction above overflowed, we can't verify anything.
2778 Otherwise, look for a parent that tests our value - 1. */
2780 if (! tree_int_cst_lt (low_minus_one, node->low))
2781 return 0;
2783 for (pnode = node->parent; pnode; pnode = pnode->parent)
2784 if (tree_int_cst_equal (low_minus_one, pnode->high))
2785 return 1;
2787 return 0;
2790 /* Search the parent sections of the case node tree
2791 to see if a test for the upper bound of NODE would be redundant.
2792 INDEX_TYPE is the type of the index expression.
2794 The instructions to generate the case decision tree are
2795 output in the same order as nodes are processed so it is
2796 known that if a parent node checks the range of the current
2797 node plus one that the current node is bounded at its upper
2798 span. Thus the test would be redundant. */
2800 static int
2801 node_has_high_bound (case_node_ptr node, tree index_type)
2803 tree high_plus_one;
2804 case_node_ptr pnode;
2806 /* If there is no upper bound, obviously no test is needed. */
2808 if (TYPE_MAX_VALUE (index_type) == NULL)
2809 return 1;
2811 /* If the upper bound of this node is the highest value in the type
2812 of the index expression, we need not test against it. */
2814 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
2815 return 1;
2817 /* If this node has a right branch, the value at the right must be greater
2818 than that at this node, so it cannot be bounded at the top and
2819 we need not bother testing any further. */
2821 if (node->right)
2822 return 0;
2824 high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
2825 node->high,
2826 build_int_cst (TREE_TYPE (node->high), 1));
2828 /* If the addition above overflowed, we can't verify anything.
2829 Otherwise, look for a parent that tests our value + 1. */
2831 if (! tree_int_cst_lt (node->high, high_plus_one))
2832 return 0;
2834 for (pnode = node->parent; pnode; pnode = pnode->parent)
2835 if (tree_int_cst_equal (high_plus_one, pnode->low))
2836 return 1;
2838 return 0;
2841 /* Search the parent sections of the
2842 case node tree to see if both tests for the upper and lower
2843 bounds of NODE would be redundant. */
2845 static int
2846 node_is_bounded (case_node_ptr node, tree index_type)
2848 return (node_has_low_bound (node, index_type)
2849 && node_has_high_bound (node, index_type));
2852 /* Emit step-by-step code to select a case for the value of INDEX.
2853 The thus generated decision tree follows the form of the
2854 case-node binary tree NODE, whose nodes represent test conditions.
2855 INDEX_TYPE is the type of the index of the switch.
2857 Care is taken to prune redundant tests from the decision tree
2858 by detecting any boundary conditions already checked by
2859 emitted rtx. (See node_has_high_bound, node_has_low_bound
2860 and node_is_bounded, above.)
2862 Where the test conditions can be shown to be redundant we emit
2863 an unconditional jump to the target code. As a further
2864 optimization, the subordinates of a tree node are examined to
2865 check for bounded nodes. In this case conditional and/or
2866 unconditional jumps as a result of the boundary check for the
2867 current node are arranged to target the subordinates associated
2868 code for out of bound conditions on the current node.
2870 We can assume that when control reaches the code generated here,
2871 the index value has already been compared with the parents
2872 of this node, and determined to be on the same side of each parent
2873 as this node is. Thus, if this node tests for the value 51,
2874 and a parent tested for 52, we don't need to consider
2875 the possibility of a value greater than 51. If another parent
2876 tests for the value 50, then this node need not test anything. */
2878 static void
2879 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
2880 tree index_type)
2882 /* If INDEX has an unsigned type, we must make unsigned branches. */
2883 int unsignedp = TYPE_UNSIGNED (index_type)
2884 enum machine_mode mode = GET_MODE (index);
2885 enum machine_mode imode = TYPE_MODE (index_type);
2887 /* Handle indices detected as constant during RTL expansion. */
2888 if (mode == VOIDmode)
2889 mode = imode;
2891 /* See if our parents have already tested everything for us.
2892 If they have, emit an unconditional jump for this node. */
2893 if (node_is_bounded (node, index_type))
2894 emit_jump (label_rtx (node->code_label));
2896 else if (tree_int_cst_equal (node->low, node->high))
2898 /* Node is single valued. First see if the index expression matches
2899 this node and then check our children, if any. */
2901 do_jump_if_equal (mode, index,
2902 convert_modes (mode, imode,
2903 expand_normal (node->low),
2904 unsignedp),
2905 label_rtx (node->code_label), unsignedp);
2907 if (node->right != 0 && node->left != 0)
2909 /* This node has children on both sides.
2910 Dispatch to one side or the other
2911 by comparing the index value with this node's value.
2912 If one subtree is bounded, check that one first,
2913 so we can avoid real branches in the tree. */
2915 if (node_is_bounded (node->right, index_type))
2917 emit_cmp_and_jump_insns (index,
2918 convert_modes
2919 (mode, imode,
2920 expand_normal (node->high),
2921 unsignedp),
2922 GT, NULL_RTX, mode, unsignedp,
2923 label_rtx (node->right->code_label));
2924 emit_case_nodes (index, node->left, default_label, index_type);
2927 else if (node_is_bounded (node->left, index_type))
2929 emit_cmp_and_jump_insns (index,
2930 convert_modes
2931 (mode, imode,
2932 expand_normal (node->high),
2933 unsignedp),
2934 LT, NULL_RTX, mode, unsignedp,
2935 label_rtx (node->left->code_label));
2936 emit_case_nodes (index, node->right, default_label, index_type);
2939 /* If both children are single-valued cases with no
2940 children, finish up all the work. This way, we can save
2941 one ordered comparison. */
2942 else if (tree_int_cst_equal (node->right->low, node->right->high)
2943 && node->right->left == 0
2944 && node->right->right == 0
2945 && tree_int_cst_equal (node->left->low, node->left->high)
2946 && node->left->left == 0
2947 && node->left->right == 0)
2949 /* Neither node is bounded. First distinguish the two sides;
2950 then emit the code for one side at a time. */
2952 /* See if the value matches what the right hand side
2953 wants. */
2954 do_jump_if_equal (mode, index,
2955 convert_modes (mode, imode,
2956 expand_normal (node->right->low),
2957 unsignedp),
2958 label_rtx (node->right->code_label),
2959 unsignedp);
2961 /* See if the value matches what the left hand side
2962 wants. */
2963 do_jump_if_equal (mode, index,
2964 convert_modes (mode, imode,
2965 expand_normal (node->left->low),
2966 unsignedp),
2967 label_rtx (node->left->code_label),
2968 unsignedp);
2971 else
2973 /* Neither node is bounded. First distinguish the two sides;
2974 then emit the code for one side at a time. */
2976 tree test_label
2977 = build_decl (CURR_INSN_LOCATION,
2978 LABEL_DECL, NULL_TREE, NULL_TREE);
2980 /* See if the value is on the right. */
2981 emit_cmp_and_jump_insns (index,
2982 convert_modes
2983 (mode, imode,
2984 expand_normal (node->high),
2985 unsignedp),
2986 GT, NULL_RTX, mode, unsignedp,
2987 label_rtx (test_label));
2989 /* Value must be on the left.
2990 Handle the left-hand subtree. */
2991 emit_case_nodes (index, node->left, default_label, index_type);
2992 /* If left-hand subtree does nothing,
2993 go to default. */
2994 if (default_label)
2995 emit_jump (default_label);
2997 /* Code branches here for the right-hand subtree. */
2998 expand_label (test_label);
2999 emit_case_nodes (index, node->right, default_label, index_type);
3003 else if (node->right != 0 && node->left == 0)
3005 /* Here we have a right child but no left so we issue a conditional
3006 branch to default and process the right child.
3008 Omit the conditional branch to default if the right child
3009 does not have any children and is single valued; it would
3010 cost too much space to save so little time. */
3012 if (node->right->right || node->right->left
3013 || !tree_int_cst_equal (node->right->low, node->right->high))
3015 if (!node_has_low_bound (node, index_type))
3017 emit_cmp_and_jump_insns (index,
3018 convert_modes
3019 (mode, imode,
3020 expand_normal (node->high),
3021 unsignedp),
3022 LT, NULL_RTX, mode, unsignedp,
3023 default_label);
3026 emit_case_nodes (index, node->right, default_label, index_type);
3028 else
3029 /* We cannot process node->right normally
3030 since we haven't ruled out the numbers less than
3031 this node's value. So handle node->right explicitly. */
3032 do_jump_if_equal (mode, index,
3033 convert_modes
3034 (mode, imode,
3035 expand_normal (node->right->low),
3036 unsignedp),
3037 label_rtx (node->right->code_label), unsignedp);
3040 else if (node->right == 0 && node->left != 0)
3042 /* Just one subtree, on the left. */
3043 if (node->left->left || node->left->right
3044 || !tree_int_cst_equal (node->left->low, node->left->high))
3046 if (!node_has_high_bound (node, index_type))
3048 emit_cmp_and_jump_insns (index,
3049 convert_modes
3050 (mode, imode,
3051 expand_normal (node->high),
3052 unsignedp),
3053 GT, NULL_RTX, mode, unsignedp,
3054 default_label);
3057 emit_case_nodes (index, node->left, default_label, index_type);
3059 else
3060 /* We cannot process node->left normally
3061 since we haven't ruled out the numbers greater than
3062 this node's value. So handle node->left explicitly. */
3063 do_jump_if_equal (mode, index,
3064 convert_modes
3065 (mode, imode,
3066 expand_normal (node->left->low),
3067 unsignedp),
3068 label_rtx (node->left->code_label), unsignedp);
3071 else
3073 /* Node is a range. These cases are very similar to those for a single
3074 value, except that we do not start by testing whether this node
3075 is the one to branch to. */
3077 if (node->right != 0 && node->left != 0)
3079 /* Node has subtrees on both sides.
3080 If the right-hand subtree is bounded,
3081 test for it first, since we can go straight there.
3082 Otherwise, we need to make a branch in the control structure,
3083 then handle the two subtrees. */
3084 tree test_label = 0;
3086 if (node_is_bounded (node->right, index_type))
3087 /* Right hand node is fully bounded so we can eliminate any
3088 testing and branch directly to the target code. */
3089 emit_cmp_and_jump_insns (index,
3090 convert_modes
3091 (mode, imode,
3092 expand_normal (node->high),
3093 unsignedp),
3094 GT, NULL_RTX, mode, unsignedp,
3095 label_rtx (node->right->code_label));
3096 else
3098 /* Right hand node requires testing.
3099 Branch to a label where we will handle it later. */
3101 test_label = build_decl (CURR_INSN_LOCATION,
3102 LABEL_DECL, NULL_TREE, NULL_TREE);
3103 emit_cmp_and_jump_insns (index,
3104 convert_modes
3105 (mode, imode,
3106 expand_normal (node->high),
3107 unsignedp),
3108 GT, NULL_RTX, mode, unsignedp,
3109 label_rtx (test_label));
3112 /* Value belongs to this node or to the left-hand subtree. */
3114 emit_cmp_and_jump_insns (index,
3115 convert_modes
3116 (mode, imode,
3117 expand_normal (node->low),
3118 unsignedp),
3119 GE, NULL_RTX, mode, unsignedp,
3120 label_rtx (node->code_label));
3122 /* Handle the left-hand subtree. */
3123 emit_case_nodes (index, node->left, default_label, index_type);
3125 /* If right node had to be handled later, do that now. */
3127 if (test_label)
3129 /* If the left-hand subtree fell through,
3130 don't let it fall into the right-hand subtree. */
3131 if (default_label)
3132 emit_jump (default_label);
3134 expand_label (test_label);
3135 emit_case_nodes (index, node->right, default_label, index_type);
3139 else if (node->right != 0 && node->left == 0)
3141 /* Deal with values to the left of this node,
3142 if they are possible. */
3143 if (!node_has_low_bound (node, index_type))
3145 emit_cmp_and_jump_insns (index,
3146 convert_modes
3147 (mode, imode,
3148 expand_normal (node->low),
3149 unsignedp),
3150 LT, NULL_RTX, mode, unsignedp,
3151 default_label);
3154 /* Value belongs to this node or to the right-hand subtree. */
3156 emit_cmp_and_jump_insns (index,
3157 convert_modes
3158 (mode, imode,
3159 expand_normal (node->high),
3160 unsignedp),
3161 LE, NULL_RTX, mode, unsignedp,
3162 label_rtx (node->code_label));
3164 emit_case_nodes (index, node->right, default_label, index_type);
3167 else if (node->right == 0 && node->left != 0)
3169 /* Deal with values to the right of this node,
3170 if they are possible. */
3171 if (!node_has_high_bound (node, index_type))
3173 emit_cmp_and_jump_insns (index,
3174 convert_modes
3175 (mode, imode,
3176 expand_normal (node->high),
3177 unsignedp),
3178 GT, NULL_RTX, mode, unsignedp,
3179 default_label);
3182 /* Value belongs to this node or to the left-hand subtree. */
3184 emit_cmp_and_jump_insns (index,
3185 convert_modes
3186 (mode, imode,
3187 expand_normal (node->low),
3188 unsignedp),
3189 GE, NULL_RTX, mode, unsignedp,
3190 label_rtx (node->code_label));
3192 emit_case_nodes (index, node->left, default_label, index_type);
3195 else
3197 /* Node has no children so we check low and high bounds to remove
3198 redundant tests. Only one of the bounds can exist,
3199 since otherwise this node is bounded--a case tested already. */
3200 int high_bound = node_has_high_bound (node, index_type);
3201 int low_bound = node_has_low_bound (node, index_type);
3203 if (!high_bound && low_bound)
3205 emit_cmp_and_jump_insns (index,
3206 convert_modes
3207 (mode, imode,
3208 expand_normal (node->high),
3209 unsignedp),
3210 GT, NULL_RTX, mode, unsignedp,
3211 default_label);
3214 else if (!low_bound && high_bound)
3216 emit_cmp_and_jump_insns (index,
3217 convert_modes
3218 (mode, imode,
3219 expand_normal (node->low),
3220 unsignedp),
3221 LT, NULL_RTX, mode, unsignedp,
3222 default_label);
3224 else if (!low_bound && !high_bound)
3226 /* Widen LOW and HIGH to the same width as INDEX. */
3227 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
3228 tree low = build1 (CONVERT_EXPR, type, node->low);
3229 tree high = build1 (CONVERT_EXPR, type, node->high);
3230 rtx low_rtx, new_index, new_bound;
3232 /* Instead of doing two branches, emit one unsigned branch for
3233 (index-low) > (high-low). */
3234 low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
3235 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
3236 NULL_RTX, unsignedp,
3237 OPTAB_WIDEN);
3238 new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
3239 high, low),
3240 NULL_RTX, mode, EXPAND_NORMAL);
3242 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
3243 mode, 1, default_label);
3246 emit_jump (label_rtx (node->code_label));