PR testsuite/44195
[official-gcc.git] / gcc / stmt.c
blob65f3a815ff3cd674009ccf31eb2e2a488ded089c
1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4 2010 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 The functions whose names start with `expand_' are called by the
25 expander to generate RTL instructions for various kinds of constructs. */
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
32 #include "rtl.h"
33 #include "hard-reg-set.h"
34 #include "tree.h"
35 #include "tm_p.h"
36 #include "flags.h"
37 #include "except.h"
38 #include "function.h"
39 #include "insn-config.h"
40 #include "expr.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "machmode.h"
44 #include "toplev.h"
45 #include "output.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "predict.h"
49 #include "optabs.h"
50 #include "target.h"
51 #include "gimple.h"
52 #include "regs.h"
53 #include "alloc-pool.h"
54 #include "pretty-print.h"
55 #include "bitmap.h"
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   We start with a vector of case nodes sorted in ascending order, and
   the default label as the last element in the vector.  Before expanding
   to RTL, we transform this vector into a list linked via the RIGHT
   fields in the case_node struct.  Nodes with higher case values are
   later in the list.

   Switch statements can be output in three forms.  A branch table is
   used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.

   For very small, suitable switch statements, we can generate a series
   of simple bit test and branches instead.  */

struct case_node
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;                 /* Lowest index value for this label */
  tree high;                /* Highest index value for this label */
  tree code_label;          /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.
   129 entries: indices 0..128 cover byte values -1..127 after the +1
   bias applied by COST_TABLE below.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  The +1 bias maps -1 to index 0; the cast avoids a
   signed/unsigned comparison surprise.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]

/* Forward declarations for the static helpers defined later in this
   file (asm expansion, return expansion, and case-statement output).  */
static int n_occurrences (int, const char *);
static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
static void expand_nl_goto_receiver (void);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree, tree);
static void expand_null_return_1 (void);
static void expand_value_return (rtx);
static int estimate_case_costs (case_node_ptr);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *add_case_node (struct case_node *, tree,
                                        tree, tree, tree, alloc_pool);
132 /* Return the rtx-label that corresponds to a LABEL_DECL,
133 creating it if necessary. */
136 label_rtx (tree label)
138 gcc_assert (TREE_CODE (label) == LABEL_DECL);
140 if (!DECL_RTL_SET_P (label))
142 rtx r = gen_label_rtx ();
143 SET_DECL_RTL (label, r);
144 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
145 LABEL_PRESERVE_P (r) = 1;
148 return DECL_RTL (label);
151 /* As above, but also put it on the forced-reference list of the
152 function that contains it. */
154 force_label_rtx (tree label)
156 rtx ref = label_rtx (label);
157 tree function = decl_function_context (label);
159 gcc_assert (function);
161 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref, forced_labels);
162 return ref;
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  /* Flush any queued stack adjustment before transferring control,
     so the stack pointer is correct at the jump target.  */
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  /* Control cannot fall through an unconditional jump; mark that
     with a barrier.  */
  emit_barrier ();
}
175 /* Emit code to jump to the address
176 specified by the pointer expression EXP. */
178 void
179 expand_computed_goto (tree exp)
181 rtx x = expand_normal (exp);
183 x = convert_memory_address (Pmode, x);
185 do_pending_stack_adjust ();
186 emit_indirect_jump (x);
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (tree label)
{
  rtx label_r = label_rtx (label);

  do_pending_stack_adjust ();
  emit_label (label_r);
  /* Propagate the source-level name (if any) to the RTL label for
     debugging/dump readability.  */
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (DECL_NONLOCAL (label))
    {
      /* A nonlocal-goto target needs receiver code to restore frame
	 state, and must be recorded so the receiver list is complete.  */
      expand_nl_goto_receiver ();
      nonlocal_goto_handler_labels
	= gen_rtx_EXPR_LIST (VOIDmode, label_r,
			     nonlocal_goto_handler_labels);
    }

  /* Address-taken labels must be kept even if apparently unused.  */
  if (FORCED_LABEL (label))
    forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);

  if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
    maybe_set_first_label_num (label_r);
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
#ifdef ENABLE_CHECKING
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  gcc_assert (!context || context == current_function_decl);
#endif

  emit_jump (label_rtx (label));
}
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int count = 0;

  for (; *s != '\0'; s++)
    if (*s == c)
      count++;

  return count;
}
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

static void
expand_asm_loc (tree string, int vol, location_t locus)
{
  rtx body;

  /* Front ends sometimes hand us &"string"; strip to the STRING_CST.  */
  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  /* The string is copied into GC memory since the insn outlives the tree.  */
  body = gen_rtx_ASM_INPUT_loc (VOIDmode,
				ggc_strdup (TREE_STRING_POINTER (string)),
				locus);

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);
}
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */

bool
parse_output_constraint (const char **constraint_p, int operand_num,
			 int ninputs, int noutputs, bool *allows_mem,
			 bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks %<=%>");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning (0, "output constraint %qc for operand %d "
		 "is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = XALLOCAVEC (char, c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string, classifying each letter.
     CONSTRAINT_LEN handles multi-character constraint names.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains incorrectly positioned "
	       "%<+%> or %<=%>");
	return false;

      case '%':
	/* `%' (commutative pair) is meaningless on the last operand.  */
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("%<%%%> constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case TARGET_MEM_CONSTRAINT:  case 'o':
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	/* Modifiers and constant constraints: nothing to record.  */
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p': case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	/* Target-defined letters: consult the machine description.  */
	if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}
/* Similar, but for input constraints.  INPUT_NUM is the index of the
   input among the NINPUTS inputs (NINOUT of which are read-write copies
   of outputs); CONSTRAINTS is the full array of constraint strings, so
   a matching (digit) constraint can be resolved to the constraint of
   the operand it matches.  */

bool
parse_input_constraint (const char **constraint_p, int input_num,
			int ninputs, int noutputs, int ninout,
			const char * const * constraints,
			bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;
  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	/* Output-only modifiers are errors, but only report them when
	   we are still looking at the operand's own constraint, not at
	   a matched output constraint (see the digit case below).  */
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains %qc", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("%<%%%> constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case TARGET_MEM_CONSTRAINT:  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  saw_match = true;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      /* Restart the scan over the matched output's constraint.  */
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      /* ??? At the end of the loop, we will skip the first part of
		 the matched constraint.  This assumes not only that the
		 other constraint is an output constraint, but also that
		 the '=' or '+' come first.  */
	      break;
	    }
	  else
	    j = end - constraint;
	  /* Anticipate increment at end of loop.  */
	  j--;
	}
	/* Fall through.  */

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation %qc in constraint", constraint[j]);
	    return false;
	  }
	/* Target-defined letters: consult the machine description.  */
	if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
	    != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  if (saw_match && !*allows_reg)
    warning (0, "matching constraint does not allow a register");

  return true;
}
558 /* Return DECL iff there's an overlap between *REGS and DECL, where DECL
559 can be an asm-declared register. Called via walk_tree. */
561 static tree
562 decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED,
563 void *data)
565 tree decl = *declp;
566 const HARD_REG_SET *const regs = (const HARD_REG_SET *) data;
568 if (TREE_CODE (decl) == VAR_DECL)
570 if (DECL_HARD_REGISTER (decl)
571 && REG_P (DECL_RTL (decl))
572 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
574 rtx reg = DECL_RTL (decl);
576 if (overlaps_hard_reg_set_p (*regs, GET_MODE (reg), REGNO (reg)))
577 return decl;
579 walk_subtrees = 0;
581 else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
582 walk_subtrees = 0;
583 return NULL_TREE;
586 /* If there is an overlap between *REGS and DECL, return the first overlap
587 found. */
588 tree
589 tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
591 return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
594 /* Check for overlap between registers marked in CLOBBERED_REGS and
595 anything inappropriate in T. Emit error and return the register
596 variable definition for error, NULL_TREE for ok. */
598 static bool
599 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
601 /* Conflicts between asm-declared register variables and the clobber
602 list are not allowed. */
603 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
605 if (overlap)
607 error ("asm-specifier for variable %qE conflicts with asm clobber list",
608 DECL_NAME (overlap));
610 /* Reset registerness to stop multiple errors emitted for a single
611 variable. */
612 DECL_REGISTER (overlap) = 0;
613 return true;
616 return false;
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

static void
expand_asm_operands (tree string, tree outputs, tree inputs,
		     tree clobbers, tree labels, int vol, location_t locus)
{
  rtvec argvec, constraintvec, labelvec;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nlabels = list_length (labels);
  int ninout;
  int nclobbers;
  HARD_REG_SET clobbered_regs;
  int clobber_conflict_found = 0;
  tree tail;
  tree t;
  int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
  int *inout_opnum = XALLOCAVEC (int, noutputs);
  rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
  enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
  const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
  int old_generating_concat_p = generating_concat_p;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (! check_operand_nalternatives (outputs, inputs))
    return;

  string = resolve_asm_operand_names (string, outputs, inputs, labels);

  /* Collect constraints: outputs first, then inputs.  */
  i = 0;
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  CLEAR_HARD_REG_SET (clobbered_regs);
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname;

      if (TREE_VALUE (tail) == error_mark_node)
	return;
      regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      /* decode_reg_name: >= 0 is a register number; -2 unknown name;
	 -3 "cc"; -4 "memory".  */
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name %qs in %<asm%>", regname);

      /* Mark clobbered registers.  */
      if (i >= 0)
	{
	  /* Clobbering the PIC register is an error.  */
	  if (i == (int) PIC_OFFSET_TABLE_REGNUM)
	    {
	      error ("PIC register %qs clobbered in %<asm%>", regname);
	      return;
	    }

	  SET_HARD_REG_BIT (clobbered_regs, i);
	}
    }

  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* If there's an erroneous arg, emit no insn.  */
      if (type == error_mark_node)
	return;

      /* Try to parse the output constraint.  If that fails, there's
	 no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
				    &allows_mem, &allows_reg, &is_inout))
	return;

      if (! allows_reg
	  && (allows_mem
	      || is_inout
	      || (DECL_P (val)
		  && REG_P (DECL_RTL (val))
		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
	mark_addressable (val);

      if (is_inout)
	ninout++;
    }

  /* Each "+" output also counts as an input.  */
  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
      return;
    }

  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
	 would get VOIDmode and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				    constraints, &allows_mem, &allows_reg))
	return;

      if (! allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (tail));
    }

  /* Second pass evaluates arguments.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      bool is_inout;
      bool allows_reg;
      bool allows_mem;
      rtx op;
      bool ok;

      /* Cannot fail: the same constraint was parsed successfully above.  */
      ok = parse_output_constraint (&constraints[i], i, ninputs,
				    noutputs, &allows_mem, &allows_reg,
				    &is_inout);
      gcc_assert (ok);

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (DECL_P (val)
	      && (allows_mem || REG_P (DECL_RTL (val)))
	      && ! (REG_P (DECL_RTL (val))
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
	  if (MEM_P (op))
	    op = validize_mem (op);

	  if (! allows_reg && !MEM_P (op))
	    error ("output number %d not directly addressable", i);
	  if ((! allows_mem && MEM_P (op))
	      || GET_CODE (op) == CONCAT)
	    {
	      /* The lvalue cannot be written directly; write a register
		 instead and remember to copy it back afterwards.  */
	      real_output_rtx[i] = op;
	      op = gen_reg_rtx (GET_MODE (op));
	      if (is_inout)
		emit_move_insn (op, real_output_rtx[i]);
	    }
	}
      else
	{
	  op = assign_temp (type, 0, 0, 1);
	  op = validize_mem (op);
	  if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
	  /* Hand the temporary back to the caller through OUTPUTS.  */
	  TREE_VALUE (tail) = make_tree (type, op);
	}
      output_rtx[i] = op;

      generating_concat_p = old_generating_concat_p;

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (type);
	  inout_opnum[ninout++] = i;
	}

      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
	clobber_conflict_found = 1;
    }

  /* Make vectors for the expression-rtx, constraint strings,
     and named operands.  */

  argvec = rtvec_alloc (ninputs);
  constraintvec = rtvec_alloc (ninputs);
  labelvec = rtvec_alloc (nlabels);

  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
				: GET_MODE (output_rtx[0])),
			       ggc_strdup (TREE_STRING_POINTER (string)),
			       empty_string, 0, argvec, constraintvec,
			       labelvec, locus);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
    {
      bool allows_reg, allows_mem;
      const char *constraint;
      tree val, type;
      rtx op;
      bool ok;

      constraint = constraints[i + noutputs];
      /* Cannot fail: parsed successfully in the first pass.  */
      ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				   constraints, &allows_mem, &allows_reg);
      gcc_assert (ok);

      generating_concat_p = 0;

      val = TREE_VALUE (tail);
      type = TREE_TYPE (val);
      /* EXPAND_INITIALIZER will not generate code for valid initializer
	 constants, but will still generate code for other types of operand.
	 This is the behavior we want for constant constraints.  */
      op = expand_expr (val, NULL_RTX, VOIDmode,
			allows_reg ? EXPAND_NORMAL
			: allows_mem ? EXPAND_MEMORY
			: EXPAND_INITIALIZER);

      /* Never pass a CONCAT to an ASM.  */
      if (GET_CODE (op) == CONCAT)
	op = force_reg (GET_MODE (op), op);
      else if (MEM_P (op))
	op = validize_mem (op);

      if (asm_operand_ok (op, constraint, NULL) <= 0)
	{
	  if (allows_reg && TYPE_MODE (type) != BLKmode)
	    op = force_reg (TYPE_MODE (type), op);
	  else if (!allows_mem)
	    warning (0, "asm operand %d probably doesn%'t match constraints",
		     i + noutputs);
	  else if (MEM_P (op))
	    {
	      /* We won't recognize either volatile memory or memory
		 with a queued address as available a memory_operand
		 at this point.  Ignore it: clearly this *is* a memory.  */
	    }
	  else
	    {
	      warning (0, "use of memory input without lvalue in "
		       "asm operand %d is deprecated", i + noutputs);

	      if (CONSTANT_P (op))
		{
		  rtx mem = force_const_mem (TYPE_MODE (type), op);
		  if (mem)
		    op = validize_mem (mem);
		  else
		    op = force_reg (TYPE_MODE (type), op);
		}
	      if (REG_P (op)
		  || GET_CODE (op) == SUBREG
		  || GET_CODE (op) == CONCAT)
		{
		  /* Spill the value to a constant-qualified stack
		     temporary so it can be used as a memory operand.  */
		  tree qual_type = build_qualified_type (type,
							 (TYPE_QUALS (type)
							  | TYPE_QUAL_CONST));
		  rtx memloc = assign_temp (qual_type, 1, 1, 1);
		  memloc = validize_mem (memloc);
		  emit_move_insn (memloc, op);
		  op = memloc;
		}
	    }
	}

      generating_concat_p = old_generating_concat_p;
      ASM_OPERANDS_INPUT (body, i) = op;

      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
	= gen_rtx_ASM_INPUT (TYPE_MODE (type),
			     ggc_strdup (constraints[i + noutputs]));

      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
	clobber_conflict_found = 1;
    }

  /* Protect all the operands from the queue now that they have all been
     evaluated.  */

  generating_concat_p = 0;

  /* For in-out operands, copy output rtx to input rtx; the matching
     input gets a numeric ("N") constraint referring to its output.  */
  for (i = 0; i < ninout; i++)
    {
      int j = inout_opnum[i];
      char buffer[16];

      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
	= output_rtx[j];

      sprintf (buffer, "%d", j);
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
	= gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
    }

  /* Copy labels to the vector.  */
  for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
    ASM_OPERANDS_LABEL (body, i)
      = gen_rtx_LABEL_REF (Pmode, label_rtx (TREE_VALUE (tail)));

  generating_concat_p = old_generating_concat_p;

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
		  ARGVEC CONSTRAINTS OPNAMES))
     If there is more than one, put them inside a PARALLEL.  */

  if (nlabels > 0 && nclobbers == 0)
    {
      gcc_assert (noutputs == 0);
      emit_jump_insn (body);
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      emit_insn (body);
    }
  else if (noutputs == 1 && nclobbers == 0)
    {
      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
      emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }
  else
    {
      rtx obody = body;
      int num = noutputs;

      if (num == 0)
	num = 1;

      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */
      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx_SET (VOIDmode,
			   output_rtx[i],
			   gen_rtx_ASM_OPERANDS
			   (GET_MODE (output_rtx[i]),
			    ggc_strdup (TREE_STRING_POINTER (string)),
			    ggc_strdup (constraints[i]),
			    i, argvec, constraintvec, labelvec, locus));

	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);
	  rtx clobbered_reg;

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx_CLOBBER (VOIDmode,
				       gen_rtx_MEM
				       (BLKmode,
					gen_rtx_SCRATCH (VOIDmode)));
		  continue;
		}

	      /* Ignore unknown register, error already signaled.  */
	      continue;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  clobbered_reg = gen_rtx_REG (QImode, j);

	  /* Do sanity check for overlap between clobbers and respectively
	     input and outputs that hasn't been handled.  Such overlap
	     should have been detected and reported above.  */
	  if (!clobber_conflict_found)
	    {
	      int opno;

	      /* We test the old body (obody) contents to avoid tripping
		 over the under-construction body.  */
	      for (opno = 0; opno < noutputs; opno++)
		if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
		  internal_error ("asm clobber conflict with output operand");

	      for (opno = 0; opno < ninputs - ninout; opno++)
		if (reg_overlap_mentioned_p (clobbered_reg,
					     ASM_OPERANDS_INPUT (obody, opno)))
		  internal_error ("asm clobber conflict with input operand");
	    }

	  XVECEXP (body, 0, i++)
	    = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
	}

      if (nlabels > 0)
	emit_jump_insn (body);
      else
	emit_insn (body);
    }

  /* For any outputs that needed reloading into registers, spill them
     back to where they belong.  */
  for (i = 0; i < noutputs; ++i)
    if (real_output_rtx[i])
      emit_move_insn (real_output_rtx[i], output_rtx[i]);

  crtl->has_asm_statement = 1;
  free_temp_slots ();
}
/* Expand a GIMPLE_ASM statement STMT to RTL.  The gimple operand
   vectors are first converted back into TREE_CHAIN lists so the older
   tree-based asm expansion routines below can consume them; outputs
   that had to be reloaded through temporaries are then copied back to
   the places the user named.  */
1095 void
1096 expand_asm_stmt (gimple stmt)
1098 int noutputs;
1099 tree outputs, tail, t;
1100 tree *o;
1101 size_t i, n;
1102 const char *s;
1103 tree str, out, in, cl, labels;
1104 location_t locus = gimple_location (stmt);
1106 /* Meh... convert the gimple asm operands into real tree lists.
1107 Eventually we should make all routines work on the vectors instead
1108 of relying on TREE_CHAIN. */
1109 out = NULL_TREE;
1110 n = gimple_asm_noutputs (stmt);
1111 if (n > 0)
1113 t = out = gimple_asm_output_op (stmt, 0);
1114 for (i = 1; i < n; i++)
1115 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
1118 in = NULL_TREE;
1119 n = gimple_asm_ninputs (stmt);
1120 if (n > 0)
1122 t = in = gimple_asm_input_op (stmt, 0);
1123 for (i = 1; i < n; i++)
1124 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
1127 cl = NULL_TREE;
1128 n = gimple_asm_nclobbers (stmt);
1129 if (n > 0)
1131 t = cl = gimple_asm_clobber_op (stmt, 0);
1132 for (i = 1; i < n; i++)
1133 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
1136 labels = NULL_TREE;
1137 n = gimple_asm_nlabels (stmt);
1138 if (n > 0)
1140 t = labels = gimple_asm_label_op (stmt, 0);
1141 for (i = 1; i < n; i++)
1142 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
1145 s = gimple_asm_string (stmt);
1146 str = build_string (strlen (s), s);
/* A "simple" asm ("template" : : "input") with no operands goes down
   the expand_asm_loc path and needs none of the output bookkeeping.  */
1148 if (gimple_asm_input_p (stmt))
1150 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
1151 return;
1154 outputs = out;
1155 noutputs = gimple_asm_noutputs (stmt);
1156 /* o[I] is the place that output number I should be written. */
1157 o = (tree *) alloca (noutputs * sizeof (tree));
1159 /* Record the contents of OUTPUTS before it is modified. */
1160 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1161 o[i] = TREE_VALUE (tail);
1163 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1164 OUTPUTS some trees for where the values were actually stored. */
1165 expand_asm_operands (str, outputs, in, cl, labels,
1166 gimple_asm_volatile_p (stmt), locus);
1168 /* Copy all the intermediate outputs into the specified outputs. */
1169 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1171 if (o[i] != TREE_VALUE (tail))
1173 expand_assignment (o[i], TREE_VALUE (tail), false);
1174 free_temp_slots ();
1176 /* Restore the original value so that it's correct the next
1177 time we expand this function. */
1178 TREE_VALUE (tail) = o[i];
1183 /* A subroutine of expand_asm_operands. Check that all operands have
1184 the same number of alternatives. Return true if so. */
1186 static bool
1187 check_operand_nalternatives (tree outputs, tree inputs)
1189 if (outputs || inputs)
/* The alternative count is defined by the first operand's constraint:
   N commas means N+1 alternatives.  */
1191 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1192 int nalternatives
1193 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1194 tree next = inputs;
1196 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1198 error ("too many alternatives in %<asm%>");
1199 return false;
/* Walk OUTPUTS first, then (via NEXT) INPUTS, checking each constraint
   string against the count derived above.  */
1202 tmp = outputs;
1203 while (tmp)
1205 const char *constraint
1206 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1208 if (n_occurrences (',', constraint) != nalternatives)
1210 error ("operand constraints for %<asm%> differ "
1211 "in number of alternatives");
1212 return false;
1215 if (TREE_CHAIN (tmp))
1216 tmp = TREE_CHAIN (tmp);
1217 else
/* End of the output list: switch over to the input list once.  */
1218 tmp = next, next = 0;
1222 return true;
1225 /* A subroutine of expand_asm_operands. Check that all operand names
1226 are unique. Return true if so. We rely on the fact that these names
1227 are identifiers, and so have been canonicalized by get_identifier,
1228 so all we need are pointer comparisons. */
1230 static bool
1231 check_unique_operand_names (tree outputs, tree inputs, tree labels)
1233 tree i, j;
/* Each list is checked against itself, and each later list against the
   earlier ones, so every pair of named operands is compared exactly
   once.  Note the name nests one level deeper for outputs/inputs
   (TREE_PURPOSE of TREE_PURPOSE) than for labels (TREE_PURPOSE).  */
1235 for (i = outputs; i ; i = TREE_CHAIN (i))
1237 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1238 if (! i_name)
1239 continue;
1241 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1242 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1243 goto failure;
1246 for (i = inputs; i ; i = TREE_CHAIN (i))
1248 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1249 if (! i_name)
1250 continue;
1252 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1253 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1254 goto failure;
1255 for (j = outputs; j ; j = TREE_CHAIN (j))
1256 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1257 goto failure;
1260 for (i = labels; i ; i = TREE_CHAIN (i))
1262 tree i_name = TREE_PURPOSE (i);
1263 if (! i_name)
1264 continue;
1266 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1267 if (simple_cst_equal (i_name, TREE_PURPOSE (j)))
1268 goto failure;
/* NOTE(review): labels are compared against inputs but apparently not
   against outputs here -- confirm whether that is intentional.  */
1269 for (j = inputs; j ; j = TREE_CHAIN (j))
1270 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1271 goto failure;
1274 return true;
1276 failure:
1277 error ("duplicate asm operand name %qs",
1278 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1279 return false;
1282 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1283 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1284 STRING and in the constraints to those numbers. */
1286 tree
1287 resolve_asm_operand_names (tree string, tree outputs, tree inputs, tree labels)
1289 char *buffer;
1290 char *p;
1291 const char *c;
1292 tree t;
1294 check_unique_operand_names (outputs, inputs, labels);
1296 /* Substitute [<name>] in input constraint strings. There should be no
1297 named operands in output constraints. */
1298 for (t = inputs; t ; t = TREE_CHAIN (t))
1300 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1301 if (strchr (c, '[') != NULL)
/* Work on a mutable copy; resolve_operand_name_1 edits in place and
   returns the position just past the substituted number.  */
1303 p = buffer = xstrdup (c);
1304 while ((p = strchr (p, '[')) != NULL)
1305 p = resolve_operand_name_1 (p, outputs, inputs, NULL);
1306 TREE_VALUE (TREE_PURPOSE (t))
1307 = build_string (strlen (buffer), buffer);
1308 free (buffer);
1312 /* Now check for any needed substitutions in the template. */
1313 c = TREE_STRING_POINTER (string);
1314 while ((c = strchr (c, '%')) != NULL)
1316 if (c[1] == '[')
1317 break;
1318 else if (ISALPHA (c[1]) && c[2] == '[')
1319 break;
1320 else
/* Skip "%%" as a unit so its second '%' is not misread as a new
   escape.  */
1322 c += 1 + (c[1] == '%');
1323 continue;
1327 if (c)
1329 /* OK, we need to make a copy so we can perform the substitutions.
1330 Assume that we will not need extra space--we get to remove '['
1331 and ']', which means we cannot have a problem until we have more
1332 than 999 operands. */
1333 buffer = xstrdup (TREE_STRING_POINTER (string));
1334 p = buffer + (c - TREE_STRING_POINTER (string));
1336 while ((p = strchr (p, '%')) != NULL)
1338 if (p[1] == '[')
1339 p += 1;
1340 else if (ISALPHA (p[1]) && p[2] == '[')
1341 p += 2;
1342 else
1344 p += 1 + (p[1] == '%');
1345 continue;
1348 p = resolve_operand_name_1 (p, outputs, inputs, labels);
1351 string = build_string (strlen (buffer), buffer);
1352 free (buffer);
1355 return string;
1358 /* A subroutine of resolve_operand_names. P points to the '[' for a
1359 potential named operand of the form [<name>]. In place, replace
1360 the name and brackets with a number. Return a pointer to the
1361 balance of the string after substitution. */
1363 static char *
1364 resolve_operand_name_1 (char *p, tree outputs, tree inputs, tree labels)
1366 char *q;
1367 int op;
1368 tree t;
1370 /* Collect the operand name. */
1371 q = strchr (++p, ']');
1372 if (!q)
1374 error ("missing close brace for named operand");
1375 return strchr (p, '\0');
/* Temporarily NUL-terminate the name in place so strcmp can be used;
   the terminator overwrites the ']', which is removed anyway below.  */
1377 *q = '\0';
1379 /* Resolve the name to a number. */
/* Operand numbering is outputs, then inputs, then labels, matching the
   numbering the asm template uses for %N.  */
1380 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1382 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1383 if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1384 goto found;
1386 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1388 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1389 if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1390 goto found;
1392 for (t = labels; t ; t = TREE_CHAIN (t), op++)
1394 tree name = TREE_PURPOSE (t);
1395 if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1396 goto found;
/* Unknown name: report it and substitute operand 0 so parsing can
   continue after the error.  */
1399 error ("undefined named operand %qs", identifier_to_locale (p));
1400 op = 0;
1402 found:
1403 /* Replace the name with the number. Unfortunately, not all libraries
1404 get the return value of sprintf correct, so search for the end of the
1405 generated string by hand. */
1406 sprintf (--p, "%d", op);
1407 p = strchr (p, '\0');
1409 /* Verify the no extra buffer space assumption. */
1410 gcc_assert (p <= q);
1412 /* Shift the rest of the buffer down to fill the gap. */
1413 memmove (p, q + 1, strlen (q + 1) + 1);
1415 return p;
1418 /* Generate RTL to evaluate the expression EXP. */
1420 void
1421 expand_expr_stmt (tree exp)
1423 rtx value;
1424 tree type;
/* The value is discarded, so expand with const0_rtx as the (ignored)
   target.  */
1426 value = expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
1427 type = TREE_TYPE (exp);
1429 /* If all we do is reference a volatile value in memory,
1430 copy it to a register to be sure it is actually touched. */
1431 if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
/* VOIDmode: nothing to touch.  Non-BLKmode: a plain register copy
   forces the volatile read.  BLKmode: no single register fits, so a
   self-comparison jump is emitted purely to reference the memory.  */
1433 if (TYPE_MODE (type) == VOIDmode)
1435 else if (TYPE_MODE (type) != BLKmode)
1436 value = copy_to_reg (value);
1437 else
1439 rtx lab = gen_label_rtx ();
1441 /* Compare the value with itself to reference it. */
1442 emit_cmp_and_jump_insns (value, value, EQ,
1443 expand_normal (TYPE_SIZE (type)),
1444 BLKmode, 0, lab);
1445 emit_label (lab);
1449 /* Free any temporaries used to evaluate this expression. */
1450 free_temp_slots ();
1453 /* Warn if EXP contains any computations whose results are not used.
1454 Return 1 if a warning is printed; 0 otherwise. LOCUS is the
1455 (potential) location of the expression. */
/* Returns nonzero (and emits -Wunused-value) when EXP computes a value
   that is discarded; the tail-recursive cases are rewritten as a goto
   loop via RESTART.  */
1458 warn_if_unused_value (const_tree exp, location_t locus)
1460 restart:
1461 if (TREE_USED (exp) || TREE_NO_WARNING (exp))
1462 return 0;
1464 /* Don't warn about void constructs. This includes casting to void,
1465 void function calls, and statement expressions with a final cast
1466 to void. */
1467 if (VOID_TYPE_P (TREE_TYPE (exp)))
1468 return 0;
1470 if (EXPR_HAS_LOCATION (exp))
1471 locus = EXPR_LOCATION (exp);
1473 switch (TREE_CODE (exp))
/* Codes that either have side effects or whose result is legitimately
   ignorable never warn.  */
1475 case PREINCREMENT_EXPR:
1476 case POSTINCREMENT_EXPR:
1477 case PREDECREMENT_EXPR:
1478 case POSTDECREMENT_EXPR:
1479 case MODIFY_EXPR:
1480 case INIT_EXPR:
1481 case TARGET_EXPR:
1482 case CALL_EXPR:
1483 case TRY_CATCH_EXPR:
1484 case WITH_CLEANUP_EXPR:
1485 case EXIT_EXPR:
1486 case VA_ARG_EXPR:
1487 return 0;
1489 case BIND_EXPR:
1490 /* For a binding, warn if no side effect within it. */
1491 exp = BIND_EXPR_BODY (exp);
1492 goto restart;
1494 case SAVE_EXPR:
1495 case NON_LVALUE_EXPR:
1496 exp = TREE_OPERAND (exp, 0);
1497 goto restart;
1499 case TRUTH_ORIF_EXPR:
1500 case TRUTH_ANDIF_EXPR:
1501 /* In && or ||, warn if 2nd operand has no side effect. */
1502 exp = TREE_OPERAND (exp, 1);
1503 goto restart;
1505 case COMPOUND_EXPR:
1506 if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
1507 return 1;
1508 /* Let people do `(foo (), 0)' without a warning. */
1509 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1510 return 0;
1511 exp = TREE_OPERAND (exp, 1);
1512 goto restart;
1514 case COND_EXPR:
1515 /* If this is an expression with side effects, don't warn; this
1516 case commonly appears in macro expansions. */
1517 if (TREE_SIDE_EFFECTS (exp))
1518 return 0;
1519 goto warn;
1521 case INDIRECT_REF:
1522 /* Don't warn about automatic dereferencing of references, since
1523 the user cannot control it. */
1524 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1526 exp = TREE_OPERAND (exp, 0);
1527 goto restart;
1529 /* Fall through. */
1531 default:
1532 /* Referencing a volatile value is a side effect, so don't warn. */
1533 if ((DECL_P (exp) || REFERENCE_CLASS_P (exp))
1534 && TREE_THIS_VOLATILE (exp))
1535 return 0;
1537 /* If this is an expression which has no operands, there is no value
1538 to be unused. There are no such language-independent codes,
1539 but front ends may define such. */
1540 if (EXPRESSION_CLASS_P (exp) && TREE_OPERAND_LENGTH (exp) == 0)
1541 return 0;
1543 warn:
1544 warning_at (locus, OPT_Wunused_value, "value computed is not used");
1545 return 1;
1550 /* Generate RTL to return from the current function, with no value.
1551 (That is, we do not do anything about returning any value.) */
1553 void
1554 expand_null_return (void)
1556 /* If this function was declared to return a value, but we
1557 didn't, clobber the return registers so that they are not
1558 propagated live to the rest of the function. */
1559 clobber_return_register ();
/* Then emit the common "jump to return_label" epilogue sequence.  */
1561 expand_null_return_1 ();
1564 /* Generate RTL to return directly from the current function.
1565 (That is, we bypass any return value.) */
1567 void
1568 expand_naked_return (void)
1570 rtx end_label;
1572 clear_pending_stack_adjust ();
1573 do_pending_stack_adjust ();
/* Lazily create the shared naked-return landing label; all naked
   returns in the function jump to the same label.  */
1575 end_label = naked_return_label;
1576 if (end_label == 0)
1577 end_label = naked_return_label = gen_label_rtx ();
1579 emit_jump (end_label);
1582 /* Generate RTL to return from the current function, with value VAL. */
1584 static void
1585 expand_value_return (rtx val)
1587 /* Copy the value to the return location unless it's already there. */
1589 tree decl = DECL_RESULT (current_function_decl);
1590 rtx return_reg = DECL_RTL (decl);
1591 if (return_reg != val)
1593 tree funtype = TREE_TYPE (current_function_decl);
1594 tree type = TREE_TYPE (decl);
1595 int unsignedp = TYPE_UNSIGNED (type);
1596 enum machine_mode old_mode = DECL_MODE (decl);
/* The ABI may require the value to be promoted (e.g. widened) when
   returned; promote_function_mode decides and may update UNSIGNEDP.  */
1597 enum machine_mode mode = promote_function_mode (type, old_mode,
1598 &unsignedp, funtype, 1);
1600 if (mode != old_mode)
1601 val = convert_modes (mode, old_mode, val, unsignedp);
/* A PARALLEL return location means the value is split across several
   registers; use a group load rather than a single move.  */
1603 if (GET_CODE (return_reg) == PARALLEL)
1604 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1605 else
1606 emit_move_insn (return_reg, val);
1609 expand_null_return_1 ();
1612 /* Output a return with no value. */
1614 static void
1615 expand_null_return_1 (void)
/* Flush any deferred stack-pointer adjustment, then jump to the
   function's single return label.  */
1617 clear_pending_stack_adjust ();
1618 do_pending_stack_adjust ();
1619 emit_jump (return_label);
1622 /* Generate RTL to evaluate the expression RETVAL and return it
1623 from the current function. */
1625 void
1626 expand_return (tree retval)
1628 rtx result_rtl;
1629 rtx val = 0;
1630 tree retval_rhs;
1632 /* If function wants no value, give it none. */
1633 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
/* Still expand RETVAL for its side effects before returning.  */
1635 expand_normal (retval);
1636 expand_null_return ();
1637 return;
1640 if (retval == error_mark_node)
1642 /* Treat this like a return of no value from a function that
1643 returns a value. */
1644 expand_null_return ();
1645 return;
/* "return <RESULT_DECL> = rhs" -- peel off the assignment and expand
   just the right-hand side.  */
1647 else if ((TREE_CODE (retval) == MODIFY_EXPR
1648 || TREE_CODE (retval) == INIT_EXPR)
1649 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
1650 retval_rhs = TREE_OPERAND (retval, 1);
1651 else
1652 retval_rhs = retval;
1654 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
1656 /* If we are returning the RESULT_DECL, then the value has already
1657 been stored into it, so we don't have to do anything special. */
1658 if (TREE_CODE (retval_rhs) == RESULT_DECL)
1659 expand_value_return (result_rtl);
1661 /* If the result is an aggregate that is being returned in one (or more)
1662 registers, load the registers here. The compiler currently can't handle
1663 copying a BLKmode value into registers. We could put this code in a
1664 more general area (for use by everyone instead of just function
1665 call/return), but until this feature is generally usable it is kept here
1666 (and in expand_call). */
1668 else if (retval_rhs != 0
1669 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
1670 && REG_P (result_rtl))
1672 int i;
1673 unsigned HOST_WIDE_INT bitpos, xbitpos;
1674 unsigned HOST_WIDE_INT padding_correction = 0;
1675 unsigned HOST_WIDE_INT bytes
1676 = int_size_in_bytes (TREE_TYPE (retval_rhs));
1677 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1678 unsigned int bitsize
1679 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
1680 rtx *result_pseudos = XALLOCAVEC (rtx, n_regs);
1681 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
1682 rtx result_val = expand_normal (retval_rhs);
1683 enum machine_mode tmpmode, result_reg_mode;
/* An empty aggregate degenerates to a plain void return.  */
1685 if (bytes == 0)
1687 expand_null_return ();
1688 return;
1691 /* If the structure doesn't take up a whole number of words, see
1692 whether the register value should be padded on the left or on
1693 the right. Set PADDING_CORRECTION to the number of padding
1694 bits needed on the left side.
1696 In most ABIs, the structure will be returned at the least end of
1697 the register, which translates to right padding on little-endian
1698 targets and left padding on big-endian targets. The opposite
1699 holds if the structure is returned at the most significant
1700 end of the register. */
1701 if (bytes % UNITS_PER_WORD != 0
1702 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
1703 ? !BYTES_BIG_ENDIAN
1704 : BYTES_BIG_ENDIAN))
1705 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
1706 * BITS_PER_UNIT));
1708 /* Copy the structure BITSIZE bits at a time. */
1709 for (bitpos = 0, xbitpos = padding_correction;
1710 bitpos < bytes * BITS_PER_UNIT;
1711 bitpos += bitsize, xbitpos += bitsize)
1713 /* We need a new destination pseudo each time xbitpos is
1714 on a word boundary and when xbitpos == padding_correction
1715 (the first time through). */
1716 if (xbitpos % BITS_PER_WORD == 0
1717 || xbitpos == padding_correction)
1719 /* Generate an appropriate register. */
1720 dst = gen_reg_rtx (word_mode);
1721 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
1723 /* Clear the destination before we move anything into it. */
1724 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
1727 /* We need a new source operand each time bitpos is on a word
1728 boundary. */
1729 if (bitpos % BITS_PER_WORD == 0)
1730 src = operand_subword_force (result_val,
1731 bitpos / BITS_PER_WORD,
1732 BLKmode);
1734 /* Use bitpos for the source extraction (left justified) and
1735 xbitpos for the destination store (right justified). */
1736 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
1737 extract_bit_field (src, bitsize,
1738 bitpos % BITS_PER_WORD, 1,
1739 NULL_RTX, word_mode, word_mode));
1742 tmpmode = GET_MODE (result_rtl);
1743 if (tmpmode == BLKmode)
1745 /* Find the smallest integer mode large enough to hold the
1746 entire structure and use that mode instead of BLKmode
1747 on the USE insn for the return register. */
1748 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1749 tmpmode != VOIDmode;
1750 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1751 /* Have we found a large enough mode? */
1752 if (GET_MODE_SIZE (tmpmode) >= bytes)
1753 break;
1755 /* A suitable mode should have been found. */
1756 gcc_assert (tmpmode != VOIDmode);
1758 PUT_MODE (result_rtl, tmpmode);
/* Assemble the per-word pseudos into one value, widening to word_mode
   first if the chosen mode is narrower than a word.  */
1761 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
1762 result_reg_mode = word_mode;
1763 else
1764 result_reg_mode = tmpmode;
1765 result_reg = gen_reg_rtx (result_reg_mode);
1767 for (i = 0; i < n_regs; i++)
1768 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
1769 result_pseudos[i]);
1771 if (tmpmode != result_reg_mode)
1772 result_reg = gen_lowpart (tmpmode, result_reg);
1774 expand_value_return (result_reg);
1776 else if (retval_rhs != 0
1777 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
1778 && (REG_P (result_rtl)
1779 || (GET_CODE (result_rtl) == PARALLEL)))
1781 /* Calculate the return value into a temporary (usually a pseudo
1782 reg). */
1783 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
1784 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
1786 val = assign_temp (nt, 0, 0, 1);
1787 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
1788 val = force_not_mem (val);
1789 /* Return the calculated value. */
1790 expand_value_return (val);
1792 else
1794 /* No hard reg used; calculate value into hard return reg. */
1795 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
1796 expand_value_return (result_rtl);
1800 /* Emit code to restore vital registers at the beginning of a nonlocal goto
1801 handler. */
1802 static void
1803 expand_nl_goto_receiver (void)
1805 rtx chain;
1807 /* Clobber the FP when we get here, so we have to make sure it's
1808 marked as used by this function. */
1809 emit_use (hard_frame_pointer_rtx);
1811 /* Mark the static chain as clobbered here so life information
1812 doesn't get messed up for it. */
1813 chain = targetm.calls.static_chain (current_function_decl, true);
1814 if (chain && REG_P (chain))
1815 emit_clobber (chain);
/* If the target has a nonlocal_goto pattern, it restored the frame
   pointer itself; otherwise do the generic restoration below.  */
1817 #ifdef HAVE_nonlocal_goto
1818 if (! HAVE_nonlocal_goto)
1819 #endif
1820 /* First adjust our frame pointer to its actual value. It was
1821 previously set to the start of the virtual area corresponding to
1822 the stacked variables when we branched here and now needs to be
1823 adjusted to the actual hardware fp value.
1825 Assignments are to virtual registers are converted by
1826 instantiate_virtual_regs into the corresponding assignment
1827 to the underlying register (fp in this case) that makes
1828 the original assignment true.
1829 So the following insn will actually be
1830 decrementing fp by STARTING_FRAME_OFFSET. */
1831 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
1833 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1834 if (fixed_regs[ARG_POINTER_REGNUM])
1836 #ifdef ELIMINABLE_REGS
1837 /* If the argument pointer can be eliminated in favor of the
1838 frame pointer, we don't need to restore it. We assume here
1839 that if such an elimination is present, it can always be used.
1840 This is the case on all known machines; if we don't make this
1841 assumption, we do unnecessary saving on many machines. */
1842 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1843 size_t i;
1845 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1846 if (elim_regs[i].from == ARG_POINTER_REGNUM
1847 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1848 break;
1850 if (i == ARRAY_SIZE (elim_regs))
1851 #endif
1853 /* Now restore our arg pointer from the address at which it
1854 was saved in our stack frame. */
1855 emit_move_insn (crtl->args.internal_arg_pointer,
1856 copy_to_reg (get_arg_pointer_save_area ()));
1859 #endif
1861 #ifdef HAVE_nonlocal_goto_receiver
1862 if (HAVE_nonlocal_goto_receiver)
1863 emit_insn (gen_nonlocal_goto_receiver ());
1864 #endif
1866 /* We must not allow the code we just generated to be reordered by
1867 scheduling. Specifically, the update of the frame pointer must
1868 happen immediately, not later. */
1869 emit_insn (gen_blockage ());
1872 /* Generate RTL for the automatic variable declaration DECL.
1873 (Other kinds of declarations are simply ignored if seen here.) */
1875 void
1876 expand_decl (tree decl)
1878 tree type;
1880 type = TREE_TYPE (decl);
1882 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
1883 type in case this node is used in a reference. */
1884 if (TREE_CODE (decl) == CONST_DECL)
1886 DECL_MODE (decl) = TYPE_MODE (type);
1887 DECL_ALIGN (decl) = TYPE_ALIGN (type);
1888 DECL_SIZE (decl) = TYPE_SIZE (type);
1889 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
1890 return;
1893 /* Otherwise, only automatic variables need any expansion done. Static and
1894 external variables, and external functions, will be handled by
1895 `assemble_variable' (called from finish_decl). TYPE_DECL requires
1896 nothing. PARM_DECLs are handled in `assign_parms'. */
1897 if (TREE_CODE (decl) != VAR_DECL)
1898 return;
1900 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
1901 return;
1903 /* Create the RTL representation for the variable. */
1905 if (type == error_mark_node)
1906 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
1908 else if (DECL_SIZE (decl) == 0)
1910 /* Variable with incomplete type. */
1911 rtx x;
1912 if (DECL_INITIAL (decl) == 0)
1913 /* Error message was already done; now avoid a crash. */
1914 x = gen_rtx_MEM (BLKmode, const0_rtx);
1915 else
1916 /* An initializer is going to decide the size of this array.
1917 Until we know the size, represent its address with a reg. */
1918 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
1920 set_mem_attributes (x, decl, 1);
1921 SET_DECL_RTL (decl, x);
1923 else if (use_register_for_decl (decl))
1925 /* Automatic variable that can go in a register. */
1926 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1928 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
1930 /* Note if the object is a user variable. */
1931 if (!DECL_ARTIFICIAL (decl))
1932 mark_user_reg (DECL_RTL (decl));
/* Record pointer-ness and pointee alignment for alias/align info.  */
1934 if (POINTER_TYPE_P (type))
1935 mark_reg_pointer (DECL_RTL (decl),
1936 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
1939 else
/* Fixed-size stack variable.  */
1941 rtx oldaddr = 0;
1942 rtx addr;
1943 rtx x;
1945 /* Variable-sized decls are dealt with in the gimplifier. */
1946 gcc_assert (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST);
1948 /* If we previously made RTL for this decl, it must be an array
1949 whose size was determined by the initializer.
1950 The old address was a register; set that register now
1951 to the proper address. */
1952 if (DECL_RTL_SET_P (decl))
1954 gcc_assert (MEM_P (DECL_RTL (decl)));
1955 gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0)));
1956 oldaddr = XEXP (DECL_RTL (decl), 0);
1959 /* Set alignment we actually gave this decl. */
1960 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
1961 : GET_MODE_BITSIZE (DECL_MODE (decl)));
1962 DECL_USER_ALIGN (decl) = 0;
1964 x = assign_temp (decl, 1, 1, 1);
1965 set_mem_attributes (x, decl, 1);
1966 SET_DECL_RTL (decl, x);
1968 if (oldaddr)
/* Point the placeholder address register at the real slot.  */
1970 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
1971 if (addr != oldaddr)
1972 emit_move_insn (oldaddr, addr);
1977 /* Emit code to save the current value of stack. */
/* Returns an rtx holding the saved stack pointer (SAVE_BLOCK level);
   the declared return type precedes this line in the file.  */
1979 expand_stack_save (void)
1981 rtx ret = NULL_RTX;
/* Flush pending adjustments first so the saved value is accurate.  */
1983 do_pending_stack_adjust ();
1984 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
1985 return ret;
1988 /* Emit code to restore the current value of stack. */
1989 void
1990 expand_stack_restore (tree var)
1992 rtx sa = expand_normal (var);
1994 sa = convert_memory_address (Pmode, sa);
1995 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
1998 /* Do the insertion of a case label into case_list. The labels are
1999 fed to us in descending order from the sorted vector of case labels used
2000 in the tree part of the middle end. So the list we construct is
2001 sorted in ascending order. The bounds on the case range, LOW and HIGH,
2002 are converted to case's index type TYPE. */
2004 static struct case_node *
2005 add_case_node (struct case_node *head, tree type, tree low, tree high,
2006 tree label, alloc_pool case_node_pool)
2008 tree min_value, max_value;
2009 struct case_node *r;
2011 gcc_assert (TREE_CODE (low) == INTEGER_CST);
2012 gcc_assert (!high || TREE_CODE (high) == INTEGER_CST);
2014 min_value = TYPE_MIN_VALUE (type);
2015 max_value = TYPE_MAX_VALUE (type);
2017 /* If there's no HIGH value, then this is not a case range; it's
2018 just a simple case label. But that's just a degenerate case
2019 range.
2020 If the bounds are equal, turn this into the one-value case. */
2021 if (!high || tree_int_cst_equal (low, high))
2023 /* If the simple case value is unreachable, ignore it. */
2024 if ((TREE_CODE (min_value) == INTEGER_CST
2025 && tree_int_cst_compare (low, min_value) < 0)
2026 || (TREE_CODE (max_value) == INTEGER_CST
2027 && tree_int_cst_compare (low, max_value) > 0))
2028 return head;
2029 low = fold_convert (type, low);
2030 high = low;
2032 else
2034 /* If the entire case range is unreachable, ignore it. */
2035 if ((TREE_CODE (min_value) == INTEGER_CST
2036 && tree_int_cst_compare (high, min_value) < 0)
2037 || (TREE_CODE (max_value) == INTEGER_CST
2038 && tree_int_cst_compare (low, max_value) > 0))
2039 return head;
2041 /* If the lower bound is less than the index type's minimum
2042 value, truncate the range bounds. */
2043 if (TREE_CODE (min_value) == INTEGER_CST
2044 && tree_int_cst_compare (low, min_value) < 0)
2045 low = min_value;
2046 low = fold_convert (type, low);
2048 /* If the upper bound is greater than the index type's maximum
2049 value, truncate the range bounds. */
2050 if (TREE_CODE (max_value) == INTEGER_CST
2051 && tree_int_cst_compare (high, max_value) > 0)
2052 high = max_value;
2053 high = fold_convert (type, high);
2057 /* Add this label to the chain. Make sure to drop overflow flags. */
/* Rebuilding the INTEGER_CSTs via build_int_cst_wide strips any
   TREE_OVERFLOW marks that fold_convert may have set.  */
2058 r = (struct case_node *) pool_alloc (case_node_pool);
2059 r->low = build_int_cst_wide (TREE_TYPE (low), TREE_INT_CST_LOW (low),
2060 TREE_INT_CST_HIGH (low));
2061 r->high = build_int_cst_wide (TREE_TYPE (high), TREE_INT_CST_LOW (high),
2062 TREE_INT_CST_HIGH (high));
2063 r->code_label = label;
/* New node becomes the head; callers feed labels in descending order,
   so the finished list ends up ascending.  */
2064 r->parent = r->left = NULL;
2065 r->right = head;
2066 return r;
2069 /* Maximum number of case bit tests. */
2070 #define MAX_CASE_BIT_TESTS 3
2072 /* By default, enable case bit tests on targets with ashlsi3. */
2073 #ifndef CASE_USE_BIT_TESTS
2074 #define CASE_USE_BIT_TESTS (optab_handler (ashl_optab, word_mode) \
2075 != CODE_FOR_nothing)
2076 #endif
2079 /* A case_bit_test represents a set of case nodes that may be
2080 selected from using a bit-wise comparison. HI and LO hold
2081 the integer to be tested against, LABEL contains the label
2082 to jump to upon success and BITS counts the number of case
2083 nodes handled by this test, typically the number of bits
2084 set in HI:LO. */
2086 struct case_bit_test
/* HI:LO together form a double-word bitmask of case values (relative
   to the minimum case value) that share LABEL as their target.  */
2088 HOST_WIDE_INT hi;
2089 HOST_WIDE_INT lo;
2090 rtx label;
2091 int bits;
2094 /* Determine whether "1 << x" is relatively cheap in word_mode. */
2096 static
2097 bool lshift_cheap_p (void)
2099 static bool init = false;
2100 static bool cheap = true;
2102 if (!init)
2104 rtx reg = gen_rtx_REG (word_mode, 10000);
2105 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET,
2106 optimize_insn_for_speed_p ());
2107 cheap = cost < COSTS_N_INSNS (3);
2108 init = true;
2111 return cheap;
2114 /* Comparison function for qsort to order bit tests by decreasing
2115 number of case nodes, i.e. the node with the most cases gets
2116 tested first. */
2118 static int
2119 case_bit_test_cmp (const void *p1, const void *p2)
2121 const struct case_bit_test *const d1 = (const struct case_bit_test *) p1;
2122 const struct case_bit_test *const d2 = (const struct case_bit_test *) p2;
2124 if (d2->bits != d1->bits)
2125 return d2->bits - d1->bits;
2127 /* Stabilize the sort. */
2128 return CODE_LABEL_NUMBER (d2->label) - CODE_LABEL_NUMBER (d1->label);
2131 /* Expand a switch statement by a short sequence of bit-wise
2132 comparisons. "switch(x)" is effectively converted into
2133 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
2134 integer constants.
2136 INDEX_EXPR is the value being switched on, which is of
2137 type INDEX_TYPE. MINVAL is the lowest case value of in
2138 the case nodes, of INDEX_TYPE type, and RANGE is highest
2139 value minus MINVAL, also of type INDEX_TYPE. NODES is
2140 the set of case nodes, and DEFAULT_LABEL is the label to
2141 branch to should none of the cases match.
2143 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
2144 node targets. */
2146 static void
2147 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
2148 tree range, case_node_ptr nodes, rtx default_label)
2150 struct case_bit_test test[MAX_CASE_BIT_TESTS];
2151 enum machine_mode mode;
2152 rtx expr, index, label;
2153 unsigned int i,j,lo,hi;
2154 struct case_node *n;
2155 unsigned int count;
2157 count = 0;
2158 for (n = nodes; n; n = n->right)
2160 label = label_rtx (n->code_label);
2161 for (i = 0; i < count; i++)
2162 if (label == test[i].label)
2163 break;
2165 if (i == count)
2167 gcc_assert (count < MAX_CASE_BIT_TESTS);
2168 test[i].hi = 0;
2169 test[i].lo = 0;
2170 test[i].label = label;
2171 test[i].bits = 1;
2172 count++;
2174 else
2175 test[i].bits++;
2177 lo = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2178 n->low, minval), 1);
2179 hi = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2180 n->high, minval), 1);
2181 for (j = lo; j <= hi; j++)
2182 if (j >= HOST_BITS_PER_WIDE_INT)
2183 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
2184 else
2185 test[i].lo |= (HOST_WIDE_INT) 1 << j;
2188 qsort (test, count, sizeof(*test), case_bit_test_cmp);
2190 index_expr = fold_build2 (MINUS_EXPR, index_type,
2191 fold_convert (index_type, index_expr),
2192 fold_convert (index_type, minval));
2193 index = expand_normal (index_expr);
2194 do_pending_stack_adjust ();
2196 mode = TYPE_MODE (index_type);
2197 expr = expand_normal (range);
2198 if (default_label)
2199 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
2200 default_label);
2202 index = convert_to_mode (word_mode, index, 0);
2203 index = expand_binop (word_mode, ashl_optab, const1_rtx,
2204 index, NULL_RTX, 1, OPTAB_WIDEN);
2206 for (i = 0; i < count; i++)
2208 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
2209 expr = expand_binop (word_mode, and_optab, index, expr,
2210 NULL_RTX, 1, OPTAB_WIDEN);
2211 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
2212 word_mode, 1, test[i].label);
2215 if (default_label)
2216 emit_jump (default_label);
2219 #ifndef HAVE_casesi
2220 #define HAVE_casesi 0
2221 #endif
2223 #ifndef HAVE_tablejump
2224 #define HAVE_tablejump 0
2225 #endif
2227 /* Terminate a case (Pascal/Ada) or switch (C) statement
2228 in which ORIG_INDEX is the expression to be tested.
2229 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
2230 type as given in the source before any compiler conversions.
2231 Generate the code to test it and jump to the right place. */
void
expand_case (gimple stmt)
{
  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
  rtx default_label = 0;
  struct case_node *n;
  unsigned int count, uniq;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  int i;
  rtx before_case, end, lab;

  tree index_expr = gimple_switch_index (stmt);
  tree index_type = TREE_TYPE (index_expr);
  int unsignedp = TYPE_UNSIGNED (index_type);

  /* The insn after which the case dispatch should finally
     be emitted.  Zero for a dummy.  */
  rtx start;

  /* A list of case labels; it is first built as a list and it may then
     be rearranged into a nearly balanced binary tree.  */
  struct case_node *case_list = 0;

  /* Label to jump to if no case matches.  */
  tree default_label_decl = NULL_TREE;

  /* Pool from which all case_node entries for this switch are drawn;
     freed wholesale on every exit path below.  */
  alloc_pool case_node_pool = create_alloc_pool ("struct case_node pool",
						 sizeof (struct case_node),
						 100);

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (index_type != error_mark_node)
    {
      tree elt;
      bitmap label_bitmap;
      int stopi = 0;

      /* cleanup_tree_cfg removes all SWITCH_EXPR with their index
	 expressions being INTEGER_CST.  */
      gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);

      /* The default case, if ever taken, is the first element.  */
      elt = gimple_switch_label (stmt, 0);
      if (!CASE_LOW (elt) && !CASE_HIGH (elt))
	{
	  default_label_decl = CASE_LABEL (elt);
	  /* Skip the default label when gathering case nodes below.  */
	  stopi = 1;
	}

      /* Build the linked list of case nodes from the GIMPLE switch
	 labels (iterated in reverse so the list comes out ordered).  */
      for (i = gimple_switch_num_labels (stmt) - 1; i >= stopi; --i)
	{
	  tree low, high;
	  elt = gimple_switch_label (stmt, i);

	  low = CASE_LOW (elt);
	  gcc_assert (low);
	  high = CASE_HIGH (elt);

	  /* Discard empty ranges.  */
	  if (high && tree_int_cst_lt (high, low))
	    continue;

	  case_list = add_case_node (case_list, index_type, low, high,
				     CASE_LABEL (elt), case_node_pool);
	}

      before_case = start = get_last_insn ();
      if (default_label_decl)
	default_label = label_rtx (default_label_decl);

      /* Get upper and lower bounds of case values.  */

      uniq = 0;
      count = 0;
      label_bitmap = BITMAP_ALLOC (NULL);
      for (n = case_list; n; n = n->right)
	{
	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */
	  if (count++ == 0)
	    {
	      minval = n->low;
	      maxval = n->high;
	    }
	  else
	    {
	      if (tree_int_cst_lt (n->low, minval))
		minval = n->low;
	      if (tree_int_cst_lt (maxval, n->high))
		maxval = n->high;
	    }
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;

	  /* If we have not seen this label yet, then increase the
	     number of unique case node targets seen.  */
	  lab = label_rtx (n->code_label);
	  if (!bitmap_bit_p (label_bitmap, CODE_LABEL_NUMBER (lab)))
	    {
	      bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab));
	      uniq++;
	    }
	}

      BITMAP_FREE (label_bitmap);

      /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
	 destination, such as one with a default case only.  However,
	 it doesn't remove cases that are out of range for the switch
	 type, so we may still get a zero here.  */
      if (count == 0)
	{
	  if (default_label)
	    emit_jump (default_label);
	  free_alloc_pool (case_node_pool);
	  return;
	}

      /* Compute span of values.  */
      range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);

      /* Try implementing this switch statement by a short sequence of
	 bit-wise comparisons.  However, we let the binary-tree case
	 below handle constant index expressions.  */
      if (CASE_USE_BIT_TESTS
	  && ! TREE_CONSTANT (index_expr)
	  && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
	  && compare_tree_int (range, 0) > 0
	  && lshift_cheap_p ()
	  /* Heuristic: bit tests win only when few targets absorb
	     several case values each.  */
	  && ((uniq == 1 && count >= 3)
	      || (uniq == 2 && count >= 5)
	      || (uniq == 3 && count >= 6)))
	{
	  /* Optimize the case where all the case values fit in a
	     word without having to subtract MINVAL.  In this case,
	     we can optimize away the subtraction.  */
	  if (compare_tree_int (minval, 0) > 0
	      && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
	    {
	      minval = build_int_cst (index_type, 0);
	      range = maxval;
	    }
	  emit_case_bit_tests (index_type, index_expr, minval, range,
			       case_list, default_label);
	}

      /* If range of values is much bigger than number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */

      else if (count < targetm.case_values_threshold ()
	       || compare_tree_int (range,
				    (optimize_insn_for_size_p () ? 3 : 10) * count) > 0
	       /* RANGE may be signed, and really large ranges will show up
		  as negative numbers.  */
	       || compare_tree_int (range, 0) < 0
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
	       || flag_pic
#endif
	       || !flag_jump_tables
	       || TREE_CONSTANT (index_expr)
	       /* If neither casesi or tablejump is available, we can
		  only go this way.  */
	       || (!HAVE_casesi && !HAVE_tablejump))
	{
	  index = expand_normal (index_expr);

	  /* If the index is a short or char that we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && ! have_insn_for (COMPARE, GET_MODE (index)))
	    {
	      enum machine_mode wider_mode;
	      /* Widen to the narrowest mode that does have a compare.  */
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (have_insn_for (COMPARE, wider_mode))
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  do_pending_stack_adjust ();

	  if (MEM_P (index))
	    index = copy_to_reg (index);

	  /* We generate a binary decision tree to select the
	     appropriate target code.  This is done as follows:

	     The list of cases is rearranged into a binary tree,
	     nearly optimal assuming equal probability for each case.

	     The tree is transformed into RTL, eliminating
	     redundant test conditions at the same time.

	     If program flow could reach the end of the
	     decision tree an unconditional jump to the
	     default code is emitted.  */

	  use_cost_table = estimate_case_costs (case_list);
	  balance_case_nodes (&case_list, NULL);
	  emit_case_nodes (index, case_list, default_label, index_type);
	  if (default_label)
	    emit_jump (default_label);
	}
      else
	{
	  /* Dispatch via casesi or a jump table.  */
	  rtx fallback_label = label_rtx (case_list->code_label);
	  table_label = gen_label_rtx ();
	  if (! try_casesi (index_type, index_expr, minval, range,
			    table_label, default_label, fallback_label))
	    {
	      bool ok;

	      /* Index jumptables from zero for suitable values of
		 minval to avoid a subtraction.  */
	      if (optimize_insn_for_speed_p ()
		  && compare_tree_int (minval, 0) > 0
		  && compare_tree_int (minval, 3) < 0)
		{
		  minval = build_int_cst (index_type, 0);
		  range = maxval;
		}

	      ok = try_tablejump (index_type, index_expr, minval, range,
				  table_label, default_label);
	      gcc_assert (ok);
	    }

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = tree_low_cst (range, 0) + 1;
	  labelvec = XALLOCAVEC (rtx, ncases);
	  memset (labelvec, 0, ncases * sizeof (rtx));

	  for (n = case_list; n; n = n->right)
	    {
	      /* Compute the low and high bounds relative to the minimum
		 value since that should fit in a HOST_WIDE_INT while the
		 actual values may not.  */
	      HOST_WIDE_INT i_low
		= tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
					     n->low, minval), 1);
	      HOST_WIDE_INT i_high
		= tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
					     n->high, minval), 1);
	      HOST_WIDE_INT i;

	      for (i = i_low; i <= i_high; i ++)
		labelvec[i]
		  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
	    }

	  /* Fill in the gaps with the default.  We may have gaps at
	     the beginning if we tried to avoid the minval subtraction,
	     so substitute some label even if the default label was
	     deemed unreachable.  */
	  if (!default_label)
	    default_label = fallback_label;
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);

	  /* Output the table.  */
	  emit_label (table_label);

	  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
	    emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
						   gen_rtx_LABEL_REF (Pmode, table_label),
						   gen_rtvec_v (ncases, labelvec),
						   const0_rtx, const0_rtx));
	  else
	    emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
					      gen_rtvec_v (ncases, labelvec)));

	  /* Record no drop-through after the table.  */
	  emit_barrier ();
	}

      /* Move the freshly emitted dispatch code so it sits right after
	 START, i.e. where the switch logically belongs.  */
      before_case = NEXT_INSN (before_case);
      end = get_last_insn ();
      reorder_insns (before_case, end, start);
    }

  free_temp_slots ();
  free_alloc_pool (case_node_pool);
}
/* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE.
   UNSIGNEDP is nonzero if the comparison should be done as unsigned.  */

static void
do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
		  int unsignedp)
{
  /* Thin wrapper over the generic compare-and-branch expander; the
     trailing -1 means no branch probability note is attached.  */
  do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
			   NULL_RTX, NULL_RTX, label, -1);
}
2543 /* Not all case values are encountered equally. This function
2544 uses a heuristic to weight case labels, in cases where that
2545 looks like a reasonable thing to do.
2547 Right now, all we try to guess is text, and we establish the
2548 following weights:
2550 chars above space: 16
2551 digits: 16
2552 default: 12
2553 space, punct: 8
2554 tab: 4
2555 newline: 2
2556 other "\" chars: 1
2557 remaining chars: 0
2559 If we find any cases in the switch that are not either -1 or in the range
2560 of valid ASCII characters, or are control characters other than those
2561 commonly used with "\", don't treat this switch scanning text.
2563 Return 1 if these nodes are suitable for cost estimation, otherwise
2564 return 0. */
static int
estimate_case_costs (case_node_ptr node)
{
  tree min_ascii = integer_minus_one_node;
  tree max_ascii = build_int_cst (TREE_TYPE (node->high), 127);
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (! cost_table_initialized)
    {
      cost_table_initialized = 1;

      for (i = 0; i < 128; i++)
	{
	  if (ISALNUM (i))
	    COST_TABLE (i) = 16;
	  else if (ISPUNCT (i))
	    COST_TABLE (i) = 8;
	  else if (ISCNTRL (i))
	    /* -1 marks control characters as "not text"; any case value
	       hitting one of these disables cost-based weighting.  */
	    COST_TABLE (i) = -1;
	}

      /* Override the defaults for the characters commonly written with
	 backslash escapes; see the weight table in the comment above.  */
      COST_TABLE (' ') = 8;
      COST_TABLE ('\t') = 4;
      COST_TABLE ('\0') = 4;
      COST_TABLE ('\n') = 2;
      COST_TABLE ('\f') = 1;
      COST_TABLE ('\v') = 1;
      COST_TABLE ('\b') = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if (tree_int_cst_lt (n->low, min_ascii)
	  || tree_int_cst_lt (max_ascii, n->high))
	return 0;

      for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
	   i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
	if (COST_TABLE (i) < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}
2623 /* Take an ordered list of case nodes
2624 and transform them into a near optimal binary tree,
2625 on the assumption that any target code selection value is as
2626 likely as any other.
2628 The transformation is performed by splitting the ordered
2629 list into two equal sections plus a pivot. The parts are
2630 then attached to the pivot as left and right branches. Each
2631 branch is then transformed recursively. */
static void
balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
	    }

	  if (use_cost_table)
	    cost += COST_TABLE (TREE_INT_CST_LOW (np->low));

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
		  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  /* *NPP is the pivot: it becomes the new root of this subtree,
	     with everything before it as the left branch and everything
	     after it as the right branch.  */
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
2740 /* Search the parent sections of the case node tree
2741 to see if a test for the lower bound of NODE would be redundant.
2742 INDEX_TYPE is the type of the index expression.
2744 The instructions to generate the case decision tree are
2745 output in the same order as nodes are processed so it is
2746 known that if a parent node checks the range of the current
2747 node minus one that the current node is bounded at its lower
2748 span. Thus the test would be redundant. */
static int
node_has_low_bound (case_node_ptr node, tree index_type)
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low,
			       build_int_cst (TREE_TYPE (node->low), 1));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  /* A parent whose HIGH equals LOW - 1 has already excluded every
     value below this node's range on the path that reaches us.  */
  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}
2786 /* Search the parent sections of the case node tree
2787 to see if a test for the upper bound of NODE would be redundant.
2788 INDEX_TYPE is the type of the index expression.
2790 The instructions to generate the case decision tree are
2791 output in the same order as nodes are processed so it is
2792 known that if a parent node checks the range of the current
2793 node plus one that the current node is bounded at its upper
2794 span. Thus the test would be redundant. */
static int
node_has_high_bound (case_node_ptr node, tree index_type)
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If there is no upper bound, obviously no test is needed.  */

  if (TYPE_MAX_VALUE (index_type) == NULL)
    return 1;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
			       node->high,
			       build_int_cst (TREE_TYPE (node->high), 1));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  /* A parent whose LOW equals HIGH + 1 has already excluded every
     value above this node's range on the path that reaches us.  */
  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}
2837 /* Search the parent sections of the
2838 case node tree to see if both tests for the upper and lower
2839 bounds of NODE would be redundant. */
2841 static int
2842 node_is_bounded (case_node_ptr node, tree index_type)
2844 return (node_has_low_bound (node, index_type)
2845 && node_has_high_bound (node, index_type));
2848 /* Emit step-by-step code to select a case for the value of INDEX.
2849 The thus generated decision tree follows the form of the
2850 case-node binary tree NODE, whose nodes represent test conditions.
2851 INDEX_TYPE is the type of the index of the switch.
2853 Care is taken to prune redundant tests from the decision tree
2854 by detecting any boundary conditions already checked by
2855 emitted rtx. (See node_has_high_bound, node_has_low_bound
2856 and node_is_bounded, above.)
2858 Where the test conditions can be shown to be redundant we emit
2859 an unconditional jump to the target code. As a further
2860 optimization, the subordinates of a tree node are examined to
2861 check for bounded nodes. In this case conditional and/or
2862 unconditional jumps as a result of the boundary check for the
2863 current node are arranged to target the subordinates associated
2864 code for out of bound conditions on the current node.
2866 We can assume that when control reaches the code generated here,
2867 the index value has already been compared with the parents
2868 of this node, and determined to be on the same side of each parent
2869 as this node is. Thus, if this node tests for the value 51,
2870 and a parent tested for 52, we don't need to consider
2871 the possibility of a value greater than 51. If another parent
2872 tests for the value 50, then this node need not test anything. */
2874 static void
2875 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
2876 tree index_type)
2878 /* If INDEX has an unsigned type, we must make unsigned branches. */
2879 int unsignedp = TYPE_UNSIGNED (index_type);
2880 enum machine_mode mode = GET_MODE (index);
2881 enum machine_mode imode = TYPE_MODE (index_type);
2883 /* Handle indices detected as constant during RTL expansion. */
2884 if (mode == VOIDmode)
2885 mode = imode;
2887 /* See if our parents have already tested everything for us.
2888 If they have, emit an unconditional jump for this node. */
2889 if (node_is_bounded (node, index_type))
2890 emit_jump (label_rtx (node->code_label));
2892 else if (tree_int_cst_equal (node->low, node->high))
2894 /* Node is single valued. First see if the index expression matches
2895 this node and then check our children, if any. */
2897 do_jump_if_equal (mode, index,
2898 convert_modes (mode, imode,
2899 expand_normal (node->low),
2900 unsignedp),
2901 label_rtx (node->code_label), unsignedp);
2903 if (node->right != 0 && node->left != 0)
2905 /* This node has children on both sides.
2906 Dispatch to one side or the other
2907 by comparing the index value with this node's value.
2908 If one subtree is bounded, check that one first,
2909 so we can avoid real branches in the tree. */
2911 if (node_is_bounded (node->right, index_type))
2913 emit_cmp_and_jump_insns (index,
2914 convert_modes
2915 (mode, imode,
2916 expand_normal (node->high),
2917 unsignedp),
2918 GT, NULL_RTX, mode, unsignedp,
2919 label_rtx (node->right->code_label));
2920 emit_case_nodes (index, node->left, default_label, index_type);
2923 else if (node_is_bounded (node->left, index_type))
2925 emit_cmp_and_jump_insns (index,
2926 convert_modes
2927 (mode, imode,
2928 expand_normal (node->high),
2929 unsignedp),
2930 LT, NULL_RTX, mode, unsignedp,
2931 label_rtx (node->left->code_label));
2932 emit_case_nodes (index, node->right, default_label, index_type);
2935 /* If both children are single-valued cases with no
2936 children, finish up all the work. This way, we can save
2937 one ordered comparison. */
2938 else if (tree_int_cst_equal (node->right->low, node->right->high)
2939 && node->right->left == 0
2940 && node->right->right == 0
2941 && tree_int_cst_equal (node->left->low, node->left->high)
2942 && node->left->left == 0
2943 && node->left->right == 0)
2945 /* Neither node is bounded. First distinguish the two sides;
2946 then emit the code for one side at a time. */
2948 /* See if the value matches what the right hand side
2949 wants. */
2950 do_jump_if_equal (mode, index,
2951 convert_modes (mode, imode,
2952 expand_normal (node->right->low),
2953 unsignedp),
2954 label_rtx (node->right->code_label),
2955 unsignedp);
2957 /* See if the value matches what the left hand side
2958 wants. */
2959 do_jump_if_equal (mode, index,
2960 convert_modes (mode, imode,
2961 expand_normal (node->left->low),
2962 unsignedp),
2963 label_rtx (node->left->code_label),
2964 unsignedp);
2967 else
2969 /* Neither node is bounded. First distinguish the two sides;
2970 then emit the code for one side at a time. */
2972 tree test_label
2973 = build_decl (CURR_INSN_LOCATION,
2974 LABEL_DECL, NULL_TREE, NULL_TREE);
2976 /* See if the value is on the right. */
2977 emit_cmp_and_jump_insns (index,
2978 convert_modes
2979 (mode, imode,
2980 expand_normal (node->high),
2981 unsignedp),
2982 GT, NULL_RTX, mode, unsignedp,
2983 label_rtx (test_label));
2985 /* Value must be on the left.
2986 Handle the left-hand subtree. */
2987 emit_case_nodes (index, node->left, default_label, index_type);
2988 /* If left-hand subtree does nothing,
2989 go to default. */
2990 if (default_label)
2991 emit_jump (default_label);
2993 /* Code branches here for the right-hand subtree. */
2994 expand_label (test_label);
2995 emit_case_nodes (index, node->right, default_label, index_type);
2999 else if (node->right != 0 && node->left == 0)
3001 /* Here we have a right child but no left so we issue a conditional
3002 branch to default and process the right child.
3004 Omit the conditional branch to default if the right child
3005 does not have any children and is single valued; it would
3006 cost too much space to save so little time. */
3008 if (node->right->right || node->right->left
3009 || !tree_int_cst_equal (node->right->low, node->right->high))
3011 if (!node_has_low_bound (node, index_type))
3013 emit_cmp_and_jump_insns (index,
3014 convert_modes
3015 (mode, imode,
3016 expand_normal (node->high),
3017 unsignedp),
3018 LT, NULL_RTX, mode, unsignedp,
3019 default_label);
3022 emit_case_nodes (index, node->right, default_label, index_type);
3024 else
3025 /* We cannot process node->right normally
3026 since we haven't ruled out the numbers less than
3027 this node's value. So handle node->right explicitly. */
3028 do_jump_if_equal (mode, index,
3029 convert_modes
3030 (mode, imode,
3031 expand_normal (node->right->low),
3032 unsignedp),
3033 label_rtx (node->right->code_label), unsignedp);
3036 else if (node->right == 0 && node->left != 0)
3038 /* Just one subtree, on the left. */
3039 if (node->left->left || node->left->right
3040 || !tree_int_cst_equal (node->left->low, node->left->high))
3042 if (!node_has_high_bound (node, index_type))
3044 emit_cmp_and_jump_insns (index,
3045 convert_modes
3046 (mode, imode,
3047 expand_normal (node->high),
3048 unsignedp),
3049 GT, NULL_RTX, mode, unsignedp,
3050 default_label);
3053 emit_case_nodes (index, node->left, default_label, index_type);
3055 else
3056 /* We cannot process node->left normally
3057 since we haven't ruled out the numbers less than
3058 this node's value. So handle node->left explicitly. */
3059 do_jump_if_equal (mode, index,
3060 convert_modes
3061 (mode, imode,
3062 expand_normal (node->left->low),
3063 unsignedp),
3064 label_rtx (node->left->code_label), unsignedp);
3067 else
3069 /* Node is a range. These cases are very similar to those for a single
3070 value, except that we do not start by testing whether this node
3071 is the one to branch to. */
3073 if (node->right != 0 && node->left != 0)
3075 /* Node has subtrees on both sides.
3076 If the right-hand subtree is bounded,
3077 test for it first, since we can go straight there.
3078 Otherwise, we need to make a branch in the control structure,
3079 then handle the two subtrees. */
3080 tree test_label = 0;
3082 if (node_is_bounded (node->right, index_type))
3083 /* Right hand node is fully bounded so we can eliminate any
3084 testing and branch directly to the target code. */
3085 emit_cmp_and_jump_insns (index,
3086 convert_modes
3087 (mode, imode,
3088 expand_normal (node->high),
3089 unsignedp),
3090 GT, NULL_RTX, mode, unsignedp,
3091 label_rtx (node->right->code_label));
3092 else
3094 /* Right hand node requires testing.
3095 Branch to a label where we will handle it later. */
3097 test_label = build_decl (CURR_INSN_LOCATION,
3098 LABEL_DECL, NULL_TREE, NULL_TREE);
3099 emit_cmp_and_jump_insns (index,
3100 convert_modes
3101 (mode, imode,
3102 expand_normal (node->high),
3103 unsignedp),
3104 GT, NULL_RTX, mode, unsignedp,
3105 label_rtx (test_label));
3108 /* Value belongs to this node or to the left-hand subtree. */
3110 emit_cmp_and_jump_insns (index,
3111 convert_modes
3112 (mode, imode,
3113 expand_normal (node->low),
3114 unsignedp),
3115 GE, NULL_RTX, mode, unsignedp,
3116 label_rtx (node->code_label));
3118 /* Handle the left-hand subtree. */
3119 emit_case_nodes (index, node->left, default_label, index_type);
3121 /* If right node had to be handled later, do that now. */
3123 if (test_label)
3125 /* If the left-hand subtree fell through,
3126 don't let it fall into the right-hand subtree. */
3127 if (default_label)
3128 emit_jump (default_label);
3130 expand_label (test_label);
3131 emit_case_nodes (index, node->right, default_label, index_type);
3135 else if (node->right != 0 && node->left == 0)
3137 /* Deal with values to the left of this node,
3138 if they are possible. */
3139 if (!node_has_low_bound (node, index_type))
3141 emit_cmp_and_jump_insns (index,
3142 convert_modes
3143 (mode, imode,
3144 expand_normal (node->low),
3145 unsignedp),
3146 LT, NULL_RTX, mode, unsignedp,
3147 default_label);
3150 /* Value belongs to this node or to the right-hand subtree. */
3152 emit_cmp_and_jump_insns (index,
3153 convert_modes
3154 (mode, imode,
3155 expand_normal (node->high),
3156 unsignedp),
3157 LE, NULL_RTX, mode, unsignedp,
3158 label_rtx (node->code_label));
3160 emit_case_nodes (index, node->right, default_label, index_type);
3163 else if (node->right == 0 && node->left != 0)
3165 /* Deal with values to the right of this node,
3166 if they are possible. */
3167 if (!node_has_high_bound (node, index_type))
3169 emit_cmp_and_jump_insns (index,
3170 convert_modes
3171 (mode, imode,
3172 expand_normal (node->high),
3173 unsignedp),
3174 GT, NULL_RTX, mode, unsignedp,
3175 default_label);
3178 /* Value belongs to this node or to the left-hand subtree. */
3180 emit_cmp_and_jump_insns (index,
3181 convert_modes
3182 (mode, imode,
3183 expand_normal (node->low),
3184 unsignedp),
3185 GE, NULL_RTX, mode, unsignedp,
3186 label_rtx (node->code_label));
3188 emit_case_nodes (index, node->left, default_label, index_type);
3191 else
3193 /* Node has no children so we check low and high bounds to remove
3194 redundant tests. Only one of the bounds can exist,
3195 since otherwise this node is bounded--a case tested already. */
3196 int high_bound = node_has_high_bound (node, index_type);
3197 int low_bound = node_has_low_bound (node, index_type);
3199 if (!high_bound && low_bound)
3201 emit_cmp_and_jump_insns (index,
3202 convert_modes
3203 (mode, imode,
3204 expand_normal (node->high),
3205 unsignedp),
3206 GT, NULL_RTX, mode, unsignedp,
3207 default_label);
3210 else if (!low_bound && high_bound)
3212 emit_cmp_and_jump_insns (index,
3213 convert_modes
3214 (mode, imode,
3215 expand_normal (node->low),
3216 unsignedp),
3217 LT, NULL_RTX, mode, unsignedp,
3218 default_label);
3220 else if (!low_bound && !high_bound)
3222 /* Widen LOW and HIGH to the same width as INDEX. */
3223 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
3224 tree low = build1 (CONVERT_EXPR, type, node->low);
3225 tree high = build1 (CONVERT_EXPR, type, node->high);
3226 rtx low_rtx, new_index, new_bound;
3228 /* Instead of doing two branches, emit one unsigned branch for
3229 (index-low) > (high-low). */
3230 low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
3231 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
3232 NULL_RTX, unsignedp,
3233 OPTAB_WIDEN);
3234 new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
3235 high, low),
3236 NULL_RTX, mode, EXPAND_NORMAL);
3238 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
3239 mode, 1, default_label);
3242 emit_jump (label_rtx (node->code_label));