* events.c (hash_param_callback): Allow NULL to stand for empty
[official-gcc.git] / gcc / stmt.c
blobd2583ca5458c50ef68541268bbacfa23b84589d3
1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 The functions whose names start with `expand_' are called by the
25 expander to generate RTL instructions for various kinds of constructs. */
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
32 #include "rtl.h"
33 #include "hard-reg-set.h"
34 #include "tree.h"
35 #include "tm_p.h"
36 #include "flags.h"
37 #include "except.h"
38 #include "function.h"
39 #include "insn-config.h"
40 #include "expr.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "machmode.h"
44 #include "toplev.h"
45 #include "output.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "predict.h"
49 #include "optabs.h"
50 #include "target.h"
51 #include "gimple.h"
52 #include "regs.h"
53 #include "alloc-pool.h"
54 #include "pretty-print.h"
56 /* Functions and data structures for expanding case statements. */
58 /* Case label structure, used to hold info on labels within case
59 statements. We handle "range" labels; for a single-value label
60 as in C, the high and low limits are the same.
62 We start with a vector of case nodes sorted in ascending order, and
63 the default label as the last element in the vector. Before expanding
64 to RTL, we transform this vector into a list linked via the RIGHT
65 fields in the case_node struct. Nodes with higher case values are
66 later in the list.
68 Switch statements can be output in three forms. A branch table is
69 used if there are more than a few labels and the labels are dense
70 within the range between the smallest and largest case value. If a
71 branch table is used, no further manipulations are done with the case
72 node chain.
74 The alternative to the use of a branch table is to generate a series
75 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
76 and PARENT fields to hold a binary tree. Initially the tree is
77 totally unbalanced, with everything on the right. We balance the tree
78 with nodes on the left having lower case values than the parent
79 and nodes on the right having higher values. We then output the tree
80 in order.
82 For very small, suitable switch statements, we can generate a series
83 of simple bit test and branches instead. */
85 struct case_node
87 struct case_node *left; /* Left son in binary tree */
88 struct case_node *right; /* Right son in binary tree; also node chain */
89 struct case_node *parent; /* Parent of node in binary tree */
90 tree low; /* Lowest index value for this label */
91 tree high; /* Highest index value for this label */
92 tree code_label; /* Label to jump to when node matches */
95 typedef struct case_node case_node;
96 typedef struct case_node *case_node_ptr;
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned: bias the index by one so -1 maps to slot 0.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
109 static int n_occurrences (int, const char *);
110 static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
111 static void expand_nl_goto_receiver (void);
112 static bool check_operand_nalternatives (tree, tree);
113 static bool check_unique_operand_names (tree, tree, tree);
114 static char *resolve_operand_name_1 (char *, tree, tree, tree);
115 static void expand_null_return_1 (void);
116 static void expand_value_return (rtx);
117 static int estimate_case_costs (case_node_ptr);
118 static bool lshift_cheap_p (void);
119 static int case_bit_test_cmp (const void *, const void *);
120 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
121 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
122 static int node_has_low_bound (case_node_ptr, tree);
123 static int node_has_high_bound (case_node_ptr, tree);
124 static int node_is_bounded (case_node_ptr, tree);
125 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
126 static struct case_node *add_case_node (struct case_node *, tree,
127 tree, tree, tree, alloc_pool);
130 /* Return the rtx-label that corresponds to a LABEL_DECL,
131 creating it if necessary. */
134 label_rtx (tree label)
136 gcc_assert (TREE_CODE (label) == LABEL_DECL);
138 if (!DECL_RTL_SET_P (label))
140 rtx r = gen_label_rtx ();
141 SET_DECL_RTL (label, r);
142 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
143 LABEL_PRESERVE_P (r) = 1;
146 return DECL_RTL (label);
149 /* As above, but also put it on the forced-reference list of the
150 function that contains it. */
152 force_label_rtx (tree label)
154 rtx ref = label_rtx (label);
155 tree function = decl_function_context (label);
157 gcc_assert (function);
159 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref, forced_labels);
160 return ref;
163 /* Add an unconditional jump to LABEL as the next sequential instruction. */
165 void
166 emit_jump (rtx label)
168 do_pending_stack_adjust ();
169 emit_jump_insn (gen_jump (label));
170 emit_barrier ();
173 /* Emit code to jump to the address
174 specified by the pointer expression EXP. */
176 void
177 expand_computed_goto (tree exp)
179 rtx x = expand_normal (exp);
181 x = convert_memory_address (Pmode, x);
183 do_pending_stack_adjust ();
184 emit_indirect_jump (x);
187 /* Handle goto statements and the labels that they can go to. */
189 /* Specify the location in the RTL code of a label LABEL,
190 which is a LABEL_DECL tree node.
192 This is used for the kind of label that the user can jump to with a
193 goto statement, and for alternatives of a switch or case statement.
194 RTL labels generated for loops and conditionals don't go through here;
195 they are generated directly at the RTL level, by other functions below.
197 Note that this has nothing to do with defining label *names*.
198 Languages vary in how they do that and what that even means. */
200 void
201 expand_label (tree label)
203 rtx label_r = label_rtx (label);
205 do_pending_stack_adjust ();
206 emit_label (label_r);
207 if (DECL_NAME (label))
208 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
210 if (DECL_NONLOCAL (label))
212 expand_nl_goto_receiver ();
213 nonlocal_goto_handler_labels
214 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
215 nonlocal_goto_handler_labels);
218 if (FORCED_LABEL (label))
219 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
221 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
222 maybe_set_first_label_num (label_r);
225 /* Generate RTL code for a `goto' statement with target label LABEL.
226 LABEL should be a LABEL_DECL tree node that was or will later be
227 defined with `expand_label'. */
229 void
230 expand_goto (tree label)
232 #ifdef ENABLE_CHECKING
233 /* Check for a nonlocal goto to a containing function. Should have
234 gotten translated to __builtin_nonlocal_goto. */
235 tree context = decl_function_context (label);
236 gcc_assert (!context || context == current_function_decl);
237 #endif
239 emit_jump (label_rtx (label));
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int count = 0;

  for (; *s != '\0'; s++)
    if (*s == c)
      count++;

  return count;
}
252 /* Generate RTL for an asm statement (explicit assembler code).
253 STRING is a STRING_CST node containing the assembler code text,
254 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
255 insn is volatile; don't optimize it. */
257 static void
258 expand_asm_loc (tree string, int vol, location_t locus)
260 rtx body;
262 if (TREE_CODE (string) == ADDR_EXPR)
263 string = TREE_OPERAND (string, 0);
265 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
266 ggc_strdup (TREE_STRING_POINTER (string)),
267 locus);
269 MEM_VOLATILE_P (body) = vol;
271 emit_insn (body);
274 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
275 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
276 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
277 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
278 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
279 constraint allows the use of a register operand. And, *IS_INOUT
280 will be true if the operand is read-write, i.e., if it is used as
281 an input as well as an output. If *CONSTRAINT_P is not in
282 canonical form, it will be made canonical. (Note that `+' will be
283 replaced with `=' as part of this process.)
285 Returns TRUE if all went well; FALSE if an error occurred. */
287 bool
288 parse_output_constraint (const char **constraint_p, int operand_num,
289 int ninputs, int noutputs, bool *allows_mem,
290 bool *allows_reg, bool *is_inout)
292 const char *constraint = *constraint_p;
293 const char *p;
295 /* Assume the constraint doesn't allow the use of either a register
296 or memory. */
297 *allows_mem = false;
298 *allows_reg = false;
300 /* Allow the `=' or `+' to not be at the beginning of the string,
301 since it wasn't explicitly documented that way, and there is a
302 large body of code that puts it last. Swap the character to
303 the front, so as not to uglify any place else. */
304 p = strchr (constraint, '=');
305 if (!p)
306 p = strchr (constraint, '+');
308 /* If the string doesn't contain an `=', issue an error
309 message. */
310 if (!p)
312 error ("output operand constraint lacks %<=%>");
313 return false;
316 /* If the constraint begins with `+', then the operand is both read
317 from and written to. */
318 *is_inout = (*p == '+');
320 /* Canonicalize the output constraint so that it begins with `='. */
321 if (p != constraint || *is_inout)
323 char *buf;
324 size_t c_len = strlen (constraint);
326 if (p != constraint)
327 warning (0, "output constraint %qc for operand %d "
328 "is not at the beginning",
329 *p, operand_num);
331 /* Make a copy of the constraint. */
332 buf = XALLOCAVEC (char, c_len + 1);
333 strcpy (buf, constraint);
334 /* Swap the first character and the `=' or `+'. */
335 buf[p - constraint] = buf[0];
336 /* Make sure the first character is an `='. (Until we do this,
337 it might be a `+'.) */
338 buf[0] = '=';
339 /* Replace the constraint with the canonicalized string. */
340 *constraint_p = ggc_alloc_string (buf, c_len);
341 constraint = *constraint_p;
344 /* Loop through the constraint string. */
345 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
346 switch (*p)
348 case '+':
349 case '=':
350 error ("operand constraint contains incorrectly positioned "
351 "%<+%> or %<=%>");
352 return false;
354 case '%':
355 if (operand_num + 1 == ninputs + noutputs)
357 error ("%<%%%> constraint used with last operand");
358 return false;
360 break;
362 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
363 *allows_mem = true;
364 break;
366 case '?': case '!': case '*': case '&': case '#':
367 case 'E': case 'F': case 'G': case 'H':
368 case 's': case 'i': case 'n':
369 case 'I': case 'J': case 'K': case 'L': case 'M':
370 case 'N': case 'O': case 'P': case ',':
371 break;
373 case '0': case '1': case '2': case '3': case '4':
374 case '5': case '6': case '7': case '8': case '9':
375 case '[':
376 error ("matching constraint not valid in output operand");
377 return false;
379 case '<': case '>':
380 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
381 excepting those that expand_call created. So match memory
382 and hope. */
383 *allows_mem = true;
384 break;
386 case 'g': case 'X':
387 *allows_reg = true;
388 *allows_mem = true;
389 break;
391 case 'p': case 'r':
392 *allows_reg = true;
393 break;
395 default:
396 if (!ISALPHA (*p))
397 break;
398 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
399 *allows_reg = true;
400 #ifdef EXTRA_CONSTRAINT_STR
401 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
402 *allows_reg = true;
403 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
404 *allows_mem = true;
405 else
407 /* Otherwise we can't assume anything about the nature of
408 the constraint except that it isn't purely registers.
409 Treat it like "g" and hope for the best. */
410 *allows_reg = true;
411 *allows_mem = true;
413 #endif
414 break;
417 return true;
420 /* Similar, but for input constraints. */
422 bool
423 parse_input_constraint (const char **constraint_p, int input_num,
424 int ninputs, int noutputs, int ninout,
425 const char * const * constraints,
426 bool *allows_mem, bool *allows_reg)
428 const char *constraint = *constraint_p;
429 const char *orig_constraint = constraint;
430 size_t c_len = strlen (constraint);
431 size_t j;
432 bool saw_match = false;
434 /* Assume the constraint doesn't allow the use of either
435 a register or memory. */
436 *allows_mem = false;
437 *allows_reg = false;
439 /* Make sure constraint has neither `=', `+', nor '&'. */
441 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
442 switch (constraint[j])
444 case '+': case '=': case '&':
445 if (constraint == orig_constraint)
447 error ("input operand constraint contains %qc", constraint[j]);
448 return false;
450 break;
452 case '%':
453 if (constraint == orig_constraint
454 && input_num + 1 == ninputs - ninout)
456 error ("%<%%%> constraint used with last operand");
457 return false;
459 break;
461 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
462 *allows_mem = true;
463 break;
465 case '<': case '>':
466 case '?': case '!': case '*': case '#':
467 case 'E': case 'F': case 'G': case 'H':
468 case 's': case 'i': case 'n':
469 case 'I': case 'J': case 'K': case 'L': case 'M':
470 case 'N': case 'O': case 'P': case ',':
471 break;
473 /* Whether or not a numeric constraint allows a register is
474 decided by the matching constraint, and so there is no need
475 to do anything special with them. We must handle them in
476 the default case, so that we don't unnecessarily force
477 operands to memory. */
478 case '0': case '1': case '2': case '3': case '4':
479 case '5': case '6': case '7': case '8': case '9':
481 char *end;
482 unsigned long match;
484 saw_match = true;
486 match = strtoul (constraint + j, &end, 10);
487 if (match >= (unsigned long) noutputs)
489 error ("matching constraint references invalid operand number");
490 return false;
493 /* Try and find the real constraint for this dup. Only do this
494 if the matching constraint is the only alternative. */
495 if (*end == '\0'
496 && (j == 0 || (j == 1 && constraint[0] == '%')))
498 constraint = constraints[match];
499 *constraint_p = constraint;
500 c_len = strlen (constraint);
501 j = 0;
502 /* ??? At the end of the loop, we will skip the first part of
503 the matched constraint. This assumes not only that the
504 other constraint is an output constraint, but also that
505 the '=' or '+' come first. */
506 break;
508 else
509 j = end - constraint;
510 /* Anticipate increment at end of loop. */
511 j--;
513 /* Fall through. */
515 case 'p': case 'r':
516 *allows_reg = true;
517 break;
519 case 'g': case 'X':
520 *allows_reg = true;
521 *allows_mem = true;
522 break;
524 default:
525 if (! ISALPHA (constraint[j]))
527 error ("invalid punctuation %qc in constraint", constraint[j]);
528 return false;
530 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
531 != NO_REGS)
532 *allows_reg = true;
533 #ifdef EXTRA_CONSTRAINT_STR
534 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
535 *allows_reg = true;
536 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
537 *allows_mem = true;
538 else
540 /* Otherwise we can't assume anything about the nature of
541 the constraint except that it isn't purely registers.
542 Treat it like "g" and hope for the best. */
543 *allows_reg = true;
544 *allows_mem = true;
546 #endif
547 break;
550 if (saw_match && !*allows_reg)
551 warning (0, "matching constraint does not allow a register");
553 return true;
556 /* Return DECL iff there's an overlap between *REGS and DECL, where DECL
557 can be an asm-declared register. Called via walk_tree. */
559 static tree
560 decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED,
561 void *data)
563 tree decl = *declp;
564 const HARD_REG_SET *const regs = (const HARD_REG_SET *) data;
566 if (TREE_CODE (decl) == VAR_DECL)
568 if (DECL_HARD_REGISTER (decl)
569 && REG_P (DECL_RTL (decl))
570 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
572 rtx reg = DECL_RTL (decl);
574 if (overlaps_hard_reg_set_p (*regs, GET_MODE (reg), REGNO (reg)))
575 return decl;
577 walk_subtrees = 0;
579 else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
580 walk_subtrees = 0;
581 return NULL_TREE;
584 /* If there is an overlap between *REGS and DECL, return the first overlap
585 found. */
586 tree
587 tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
589 return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
592 /* Check for overlap between registers marked in CLOBBERED_REGS and
593 anything inappropriate in T. Emit error and return the register
594 variable definition for error, NULL_TREE for ok. */
596 static bool
597 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
599 /* Conflicts between asm-declared register variables and the clobber
600 list are not allowed. */
601 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
603 if (overlap)
605 error ("asm-specifier for variable %qE conflicts with asm clobber list",
606 DECL_NAME (overlap));
608 /* Reset registerness to stop multiple errors emitted for a single
609 variable. */
610 DECL_REGISTER (overlap) = 0;
611 return true;
614 return false;
617 /* Generate RTL for an asm statement with arguments.
618 STRING is the instruction template.
619 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
620 Each output or input has an expression in the TREE_VALUE and
621 a tree list in TREE_PURPOSE which in turn contains a constraint
622 name in TREE_VALUE (or NULL_TREE) and a constraint string
623 in TREE_PURPOSE.
624 CLOBBERS is a list of STRING_CST nodes each naming a hard register
625 that is clobbered by this insn.
627 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
628 Some elements of OUTPUTS may be replaced with trees representing temporary
629 values. The caller should copy those temporary values to the originally
630 specified lvalues.
632 VOL nonzero means the insn is volatile; don't optimize it. */
634 static void
635 expand_asm_operands (tree string, tree outputs, tree inputs,
636 tree clobbers, tree labels, int vol, location_t locus)
638 rtvec argvec, constraintvec, labelvec;
639 rtx body;
640 int ninputs = list_length (inputs);
641 int noutputs = list_length (outputs);
642 int nlabels = list_length (labels);
643 int ninout;
644 int nclobbers;
645 HARD_REG_SET clobbered_regs;
646 int clobber_conflict_found = 0;
647 tree tail;
648 tree t;
649 int i;
650 /* Vector of RTX's of evaluated output operands. */
651 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
652 int *inout_opnum = XALLOCAVEC (int, noutputs);
653 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
654 enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
655 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
656 int old_generating_concat_p = generating_concat_p;
658 /* An ASM with no outputs needs to be treated as volatile, for now. */
659 if (noutputs == 0)
660 vol = 1;
662 if (! check_operand_nalternatives (outputs, inputs))
663 return;
665 string = resolve_asm_operand_names (string, outputs, inputs, labels);
667 /* Collect constraints. */
668 i = 0;
669 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
670 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
671 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
672 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
674 /* Sometimes we wish to automatically clobber registers across an asm.
675 Case in point is when the i386 backend moved from cc0 to a hard reg --
676 maintaining source-level compatibility means automatically clobbering
677 the flags register. */
678 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
680 /* Count the number of meaningful clobbered registers, ignoring what
681 we would ignore later. */
682 nclobbers = 0;
683 CLEAR_HARD_REG_SET (clobbered_regs);
684 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
686 const char *regname;
688 if (TREE_VALUE (tail) == error_mark_node)
689 return;
690 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
692 i = decode_reg_name (regname);
693 if (i >= 0 || i == -4)
694 ++nclobbers;
695 else if (i == -2)
696 error ("unknown register name %qs in %<asm%>", regname);
698 /* Mark clobbered registers. */
699 if (i >= 0)
701 /* Clobbering the PIC register is an error. */
702 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
704 error ("PIC register %qs clobbered in %<asm%>", regname);
705 return;
708 SET_HARD_REG_BIT (clobbered_regs, i);
712 /* First pass over inputs and outputs checks validity and sets
713 mark_addressable if needed. */
715 ninout = 0;
716 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
718 tree val = TREE_VALUE (tail);
719 tree type = TREE_TYPE (val);
720 const char *constraint;
721 bool is_inout;
722 bool allows_reg;
723 bool allows_mem;
725 /* If there's an erroneous arg, emit no insn. */
726 if (type == error_mark_node)
727 return;
729 /* Try to parse the output constraint. If that fails, there's
730 no point in going further. */
731 constraint = constraints[i];
732 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
733 &allows_mem, &allows_reg, &is_inout))
734 return;
736 if (! allows_reg
737 && (allows_mem
738 || is_inout
739 || (DECL_P (val)
740 && REG_P (DECL_RTL (val))
741 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
742 mark_addressable (val);
744 if (is_inout)
745 ninout++;
748 ninputs += ninout;
749 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
751 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
752 return;
755 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
757 bool allows_reg, allows_mem;
758 const char *constraint;
760 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
761 would get VOIDmode and that could cause a crash in reload. */
762 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
763 return;
765 constraint = constraints[i + noutputs];
766 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
767 constraints, &allows_mem, &allows_reg))
768 return;
770 if (! allows_reg && allows_mem)
771 mark_addressable (TREE_VALUE (tail));
774 /* Second pass evaluates arguments. */
776 ninout = 0;
777 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
779 tree val = TREE_VALUE (tail);
780 tree type = TREE_TYPE (val);
781 bool is_inout;
782 bool allows_reg;
783 bool allows_mem;
784 rtx op;
785 bool ok;
787 ok = parse_output_constraint (&constraints[i], i, ninputs,
788 noutputs, &allows_mem, &allows_reg,
789 &is_inout);
790 gcc_assert (ok);
792 /* If an output operand is not a decl or indirect ref and our constraint
793 allows a register, make a temporary to act as an intermediate.
794 Make the asm insn write into that, then our caller will copy it to
795 the real output operand. Likewise for promoted variables. */
797 generating_concat_p = 0;
799 real_output_rtx[i] = NULL_RTX;
800 if ((TREE_CODE (val) == INDIRECT_REF
801 && allows_mem)
802 || (DECL_P (val)
803 && (allows_mem || REG_P (DECL_RTL (val)))
804 && ! (REG_P (DECL_RTL (val))
805 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
806 || ! allows_reg
807 || is_inout)
809 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
810 if (MEM_P (op))
811 op = validize_mem (op);
813 if (! allows_reg && !MEM_P (op))
814 error ("output number %d not directly addressable", i);
815 if ((! allows_mem && MEM_P (op))
816 || GET_CODE (op) == CONCAT)
818 real_output_rtx[i] = op;
819 op = gen_reg_rtx (GET_MODE (op));
820 if (is_inout)
821 emit_move_insn (op, real_output_rtx[i]);
824 else
826 op = assign_temp (type, 0, 0, 1);
827 op = validize_mem (op);
828 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
829 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
830 TREE_VALUE (tail) = make_tree (type, op);
832 output_rtx[i] = op;
834 generating_concat_p = old_generating_concat_p;
836 if (is_inout)
838 inout_mode[ninout] = TYPE_MODE (type);
839 inout_opnum[ninout++] = i;
842 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
843 clobber_conflict_found = 1;
846 /* Make vectors for the expression-rtx, constraint strings,
847 and named operands. */
849 argvec = rtvec_alloc (ninputs);
850 constraintvec = rtvec_alloc (ninputs);
851 labelvec = rtvec_alloc (nlabels);
853 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
854 : GET_MODE (output_rtx[0])),
855 ggc_strdup (TREE_STRING_POINTER (string)),
856 empty_string, 0, argvec, constraintvec,
857 labelvec, locus);
859 MEM_VOLATILE_P (body) = vol;
861 /* Eval the inputs and put them into ARGVEC.
862 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
864 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
866 bool allows_reg, allows_mem;
867 const char *constraint;
868 tree val, type;
869 rtx op;
870 bool ok;
872 constraint = constraints[i + noutputs];
873 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
874 constraints, &allows_mem, &allows_reg);
875 gcc_assert (ok);
877 generating_concat_p = 0;
879 val = TREE_VALUE (tail);
880 type = TREE_TYPE (val);
881 /* EXPAND_INITIALIZER will not generate code for valid initializer
882 constants, but will still generate code for other types of operand.
883 This is the behavior we want for constant constraints. */
884 op = expand_expr (val, NULL_RTX, VOIDmode,
885 allows_reg ? EXPAND_NORMAL
886 : allows_mem ? EXPAND_MEMORY
887 : EXPAND_INITIALIZER);
889 /* Never pass a CONCAT to an ASM. */
890 if (GET_CODE (op) == CONCAT)
891 op = force_reg (GET_MODE (op), op);
892 else if (MEM_P (op))
893 op = validize_mem (op);
895 if (asm_operand_ok (op, constraint, NULL) <= 0)
897 if (allows_reg && TYPE_MODE (type) != BLKmode)
898 op = force_reg (TYPE_MODE (type), op);
899 else if (!allows_mem)
900 warning (0, "asm operand %d probably doesn%'t match constraints",
901 i + noutputs);
902 else if (MEM_P (op))
904 /* We won't recognize either volatile memory or memory
905 with a queued address as available a memory_operand
906 at this point. Ignore it: clearly this *is* a memory. */
908 else
910 warning (0, "use of memory input without lvalue in "
911 "asm operand %d is deprecated", i + noutputs);
913 if (CONSTANT_P (op))
915 rtx mem = force_const_mem (TYPE_MODE (type), op);
916 if (mem)
917 op = validize_mem (mem);
918 else
919 op = force_reg (TYPE_MODE (type), op);
921 if (REG_P (op)
922 || GET_CODE (op) == SUBREG
923 || GET_CODE (op) == CONCAT)
925 tree qual_type = build_qualified_type (type,
926 (TYPE_QUALS (type)
927 | TYPE_QUAL_CONST));
928 rtx memloc = assign_temp (qual_type, 1, 1, 1);
929 memloc = validize_mem (memloc);
930 emit_move_insn (memloc, op);
931 op = memloc;
936 generating_concat_p = old_generating_concat_p;
937 ASM_OPERANDS_INPUT (body, i) = op;
939 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
940 = gen_rtx_ASM_INPUT (TYPE_MODE (type),
941 ggc_strdup (constraints[i + noutputs]));
943 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
944 clobber_conflict_found = 1;
947 /* Protect all the operands from the queue now that they have all been
948 evaluated. */
950 generating_concat_p = 0;
952 /* For in-out operands, copy output rtx to input rtx. */
953 for (i = 0; i < ninout; i++)
955 int j = inout_opnum[i];
956 char buffer[16];
958 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
959 = output_rtx[j];
961 sprintf (buffer, "%d", j);
962 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
963 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
966 /* Copy labels to the vector. */
967 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
968 ASM_OPERANDS_LABEL (body, i)
969 = gen_rtx_LABEL_REF (Pmode, label_rtx (TREE_VALUE (tail)));
971 generating_concat_p = old_generating_concat_p;
973 /* Now, for each output, construct an rtx
974 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
975 ARGVEC CONSTRAINTS OPNAMES))
976 If there is more than one, put them inside a PARALLEL. */
978 if (nlabels > 0 && nclobbers == 0)
980 gcc_assert (noutputs == 0);
981 emit_jump_insn (body);
983 else if (noutputs == 0 && nclobbers == 0)
985 /* No output operands: put in a raw ASM_OPERANDS rtx. */
986 emit_insn (body);
988 else if (noutputs == 1 && nclobbers == 0)
990 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
991 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
993 else
995 rtx obody = body;
996 int num = noutputs;
998 if (num == 0)
999 num = 1;
1001 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1003 /* For each output operand, store a SET. */
1004 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1006 XVECEXP (body, 0, i)
1007 = gen_rtx_SET (VOIDmode,
1008 output_rtx[i],
1009 gen_rtx_ASM_OPERANDS
1010 (GET_MODE (output_rtx[i]),
1011 ggc_strdup (TREE_STRING_POINTER (string)),
1012 ggc_strdup (constraints[i]),
1013 i, argvec, constraintvec, labelvec, locus));
1015 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1018 /* If there are no outputs (but there are some clobbers)
1019 store the bare ASM_OPERANDS into the PARALLEL. */
1021 if (i == 0)
1022 XVECEXP (body, 0, i++) = obody;
1024 /* Store (clobber REG) for each clobbered register specified. */
1026 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1028 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1029 int j = decode_reg_name (regname);
1030 rtx clobbered_reg;
1032 if (j < 0)
1034 if (j == -3) /* `cc', which is not a register */
1035 continue;
1037 if (j == -4) /* `memory', don't cache memory across asm */
1039 XVECEXP (body, 0, i++)
1040 = gen_rtx_CLOBBER (VOIDmode,
1041 gen_rtx_MEM
1042 (BLKmode,
1043 gen_rtx_SCRATCH (VOIDmode)));
1044 continue;
1047 /* Ignore unknown register, error already signaled. */
1048 continue;
1051 /* Use QImode since that's guaranteed to clobber just one reg. */
1052 clobbered_reg = gen_rtx_REG (QImode, j);
1054 /* Do sanity check for overlap between clobbers and respectively
1055 input and outputs that hasn't been handled. Such overlap
1056 should have been detected and reported above. */
1057 if (!clobber_conflict_found)
1059 int opno;
1061 /* We test the old body (obody) contents to avoid tripping
1062 over the under-construction body. */
1063 for (opno = 0; opno < noutputs; opno++)
1064 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1065 internal_error ("asm clobber conflict with output operand");
1067 for (opno = 0; opno < ninputs - ninout; opno++)
1068 if (reg_overlap_mentioned_p (clobbered_reg,
1069 ASM_OPERANDS_INPUT (obody, opno)))
1070 internal_error ("asm clobber conflict with input operand");
1073 XVECEXP (body, 0, i++)
1074 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1077 if (nlabels > 0)
1078 emit_jump_insn (body);
1079 else
1080 emit_insn (body);
1083 /* For any outputs that needed reloading into registers, spill them
1084 back to where they belong. */
1085 for (i = 0; i < noutputs; ++i)
1086 if (real_output_rtx[i])
1087 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1089 crtl->has_asm_statement = 1;
1090 free_temp_slots ();
1093 void
1094 expand_asm_stmt (gimple stmt)
1096 int noutputs;
1097 tree outputs, tail, t;
1098 tree *o;
1099 size_t i, n;
1100 const char *s;
1101 tree str, out, in, cl, labels;
1103 /* Meh... convert the gimple asm operands into real tree lists.
1104 Eventually we should make all routines work on the vectors instead
1105 of relying on TREE_CHAIN. */
1106 out = NULL_TREE;
1107 n = gimple_asm_noutputs (stmt);
1108 if (n > 0)
1110 t = out = gimple_asm_output_op (stmt, 0);
1111 for (i = 1; i < n; i++)
1112 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
1115 in = NULL_TREE;
1116 n = gimple_asm_ninputs (stmt);
1117 if (n > 0)
1119 t = in = gimple_asm_input_op (stmt, 0);
1120 for (i = 1; i < n; i++)
1121 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
1124 cl = NULL_TREE;
1125 n = gimple_asm_nclobbers (stmt);
1126 if (n > 0)
1128 t = cl = gimple_asm_clobber_op (stmt, 0);
1129 for (i = 1; i < n; i++)
1130 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
1133 labels = NULL_TREE;
1134 n = gimple_asm_nlabels (stmt);
1135 if (n > 0)
1137 t = labels = gimple_asm_label_op (stmt, 0);
1138 for (i = 1; i < n; i++)
1139 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
1142 s = gimple_asm_string (stmt);
1143 str = build_string (strlen (s), s);
1145 if (gimple_asm_input_p (stmt))
1147 expand_asm_loc (str, gimple_asm_volatile_p (stmt), input_location);
1148 return;
1151 outputs = out;
1152 noutputs = gimple_asm_noutputs (stmt);
1153 /* o[I] is the place that output number I should be written. */
1154 o = (tree *) alloca (noutputs * sizeof (tree));
1156 /* Record the contents of OUTPUTS before it is modified. */
1157 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1158 o[i] = TREE_VALUE (tail);
1160 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1161 OUTPUTS some trees for where the values were actually stored. */
1162 expand_asm_operands (str, outputs, in, cl, labels,
1163 gimple_asm_volatile_p (stmt), input_location);
1165 /* Copy all the intermediate outputs into the specified outputs. */
1166 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1168 if (o[i] != TREE_VALUE (tail))
1170 expand_assignment (o[i], TREE_VALUE (tail), false);
1171 free_temp_slots ();
1173 /* Restore the original value so that it's correct the next
1174 time we expand this function. */
1175 TREE_VALUE (tail) = o[i];
1180 /* A subroutine of expand_asm_operands. Check that all operands have
1181 the same number of alternatives. Return true if so. */
1183 static bool
1184 check_operand_nalternatives (tree outputs, tree inputs)
1186 if (outputs || inputs)
1188 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1189 int nalternatives
1190 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1191 tree next = inputs;
1193 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1195 error ("too many alternatives in %<asm%>");
1196 return false;
1199 tmp = outputs;
1200 while (tmp)
1202 const char *constraint
1203 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1205 if (n_occurrences (',', constraint) != nalternatives)
1207 error ("operand constraints for %<asm%> differ "
1208 "in number of alternatives");
1209 return false;
1212 if (TREE_CHAIN (tmp))
1213 tmp = TREE_CHAIN (tmp);
1214 else
1215 tmp = next, next = 0;
1219 return true;
1222 /* A subroutine of expand_asm_operands. Check that all operand names
1223 are unique. Return true if so. We rely on the fact that these names
1224 are identifiers, and so have been canonicalized by get_identifier,
1225 so all we need are pointer comparisons. */
1227 static bool
1228 check_unique_operand_names (tree outputs, tree inputs, tree labels)
1230 tree i, j;
1232 for (i = outputs; i ; i = TREE_CHAIN (i))
1234 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1235 if (! i_name)
1236 continue;
1238 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1239 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1240 goto failure;
1243 for (i = inputs; i ; i = TREE_CHAIN (i))
1245 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1246 if (! i_name)
1247 continue;
1249 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1250 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1251 goto failure;
1252 for (j = outputs; j ; j = TREE_CHAIN (j))
1253 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1254 goto failure;
1257 for (i = labels; i ; i = TREE_CHAIN (i))
1259 tree i_name = TREE_PURPOSE (i);
1260 if (! i_name)
1261 continue;
1263 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1264 if (simple_cst_equal (i_name, TREE_PURPOSE (j)))
1265 goto failure;
1266 for (j = inputs; j ; j = TREE_CHAIN (j))
1267 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1268 goto failure;
1271 return true;
1273 failure:
1274 error ("duplicate asm operand name %qs",
1275 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1276 return false;
1279 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1280 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1281 STRING and in the constraints to those numbers. */
1283 tree
1284 resolve_asm_operand_names (tree string, tree outputs, tree inputs, tree labels)
1286 char *buffer;
1287 char *p;
1288 const char *c;
1289 tree t;
1291 check_unique_operand_names (outputs, inputs, labels);
1293 /* Substitute [<name>] in input constraint strings. There should be no
1294 named operands in output constraints. */
1295 for (t = inputs; t ; t = TREE_CHAIN (t))
1297 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1298 if (strchr (c, '[') != NULL)
1300 p = buffer = xstrdup (c);
1301 while ((p = strchr (p, '[')) != NULL)
1302 p = resolve_operand_name_1 (p, outputs, inputs, NULL);
1303 TREE_VALUE (TREE_PURPOSE (t))
1304 = build_string (strlen (buffer), buffer);
1305 free (buffer);
1309 /* Now check for any needed substitutions in the template. */
1310 c = TREE_STRING_POINTER (string);
1311 while ((c = strchr (c, '%')) != NULL)
1313 if (c[1] == '[')
1314 break;
1315 else if (ISALPHA (c[1]) && c[2] == '[')
1316 break;
1317 else
1319 c += 1;
1320 continue;
1324 if (c)
1326 /* OK, we need to make a copy so we can perform the substitutions.
1327 Assume that we will not need extra space--we get to remove '['
1328 and ']', which means we cannot have a problem until we have more
1329 than 999 operands. */
1330 buffer = xstrdup (TREE_STRING_POINTER (string));
1331 p = buffer + (c - TREE_STRING_POINTER (string));
1333 while ((p = strchr (p, '%')) != NULL)
1335 if (p[1] == '[')
1336 p += 1;
1337 else if (ISALPHA (p[1]) && p[2] == '[')
1338 p += 2;
1339 else
1341 p += 1;
1342 continue;
1345 p = resolve_operand_name_1 (p, outputs, inputs, labels);
1348 string = build_string (strlen (buffer), buffer);
1349 free (buffer);
1352 return string;
1355 /* A subroutine of resolve_operand_names. P points to the '[' for a
1356 potential named operand of the form [<name>]. In place, replace
1357 the name and brackets with a number. Return a pointer to the
1358 balance of the string after substitution. */
1360 static char *
1361 resolve_operand_name_1 (char *p, tree outputs, tree inputs, tree labels)
1363 char *q;
1364 int op;
1365 tree t;
1367 /* Collect the operand name. */
1368 q = strchr (++p, ']');
1369 if (!q)
1371 error ("missing close brace for named operand");
1372 return strchr (p, '\0');
1374 *q = '\0';
1376 /* Resolve the name to a number. */
1377 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1379 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1380 if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1381 goto found;
1383 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1385 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1386 if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1387 goto found;
1389 for (t = labels; t ; t = TREE_CHAIN (t), op++)
1391 tree name = TREE_PURPOSE (t);
1392 if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1393 goto found;
1396 error ("undefined named operand %qs", identifier_to_locale (p));
1397 op = 0;
1399 found:
1400 /* Replace the name with the number. Unfortunately, not all libraries
1401 get the return value of sprintf correct, so search for the end of the
1402 generated string by hand. */
1403 sprintf (--p, "%d", op);
1404 p = strchr (p, '\0');
1406 /* Verify the no extra buffer space assumption. */
1407 gcc_assert (p <= q);
1409 /* Shift the rest of the buffer down to fill the gap. */
1410 memmove (p, q + 1, strlen (q + 1) + 1);
1412 return p;
1415 /* Generate RTL to evaluate the expression EXP. */
1417 void
1418 expand_expr_stmt (tree exp)
1420 rtx value;
1421 tree type;
1423 value = expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
1424 type = TREE_TYPE (exp);
1426 /* If all we do is reference a volatile value in memory,
1427 copy it to a register to be sure it is actually touched. */
1428 if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
1430 if (TYPE_MODE (type) == VOIDmode)
1432 else if (TYPE_MODE (type) != BLKmode)
1433 value = copy_to_reg (value);
1434 else
1436 rtx lab = gen_label_rtx ();
1438 /* Compare the value with itself to reference it. */
1439 emit_cmp_and_jump_insns (value, value, EQ,
1440 expand_normal (TYPE_SIZE (type)),
1441 BLKmode, 0, lab);
1442 emit_label (lab);
1446 /* Free any temporaries used to evaluate this expression. */
1447 free_temp_slots ();
1450 /* Warn if EXP contains any computations whose results are not used.
1451 Return 1 if a warning is printed; 0 otherwise. LOCUS is the
1452 (potential) location of the expression. */
1455 warn_if_unused_value (const_tree exp, location_t locus)
1457 restart:
1458 if (TREE_USED (exp) || TREE_NO_WARNING (exp))
1459 return 0;
1461 /* Don't warn about void constructs. This includes casting to void,
1462 void function calls, and statement expressions with a final cast
1463 to void. */
1464 if (VOID_TYPE_P (TREE_TYPE (exp)))
1465 return 0;
1467 if (EXPR_HAS_LOCATION (exp))
1468 locus = EXPR_LOCATION (exp);
1470 switch (TREE_CODE (exp))
1472 case PREINCREMENT_EXPR:
1473 case POSTINCREMENT_EXPR:
1474 case PREDECREMENT_EXPR:
1475 case POSTDECREMENT_EXPR:
1476 case MODIFY_EXPR:
1477 case INIT_EXPR:
1478 case TARGET_EXPR:
1479 case CALL_EXPR:
1480 case TRY_CATCH_EXPR:
1481 case WITH_CLEANUP_EXPR:
1482 case EXIT_EXPR:
1483 case VA_ARG_EXPR:
1484 return 0;
1486 case BIND_EXPR:
1487 /* For a binding, warn if no side effect within it. */
1488 exp = BIND_EXPR_BODY (exp);
1489 goto restart;
1491 case SAVE_EXPR:
1492 case NON_LVALUE_EXPR:
1493 exp = TREE_OPERAND (exp, 0);
1494 goto restart;
1496 case TRUTH_ORIF_EXPR:
1497 case TRUTH_ANDIF_EXPR:
1498 /* In && or ||, warn if 2nd operand has no side effect. */
1499 exp = TREE_OPERAND (exp, 1);
1500 goto restart;
1502 case COMPOUND_EXPR:
1503 if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
1504 return 1;
1505 /* Let people do `(foo (), 0)' without a warning. */
1506 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1507 return 0;
1508 exp = TREE_OPERAND (exp, 1);
1509 goto restart;
1511 case COND_EXPR:
1512 /* If this is an expression with side effects, don't warn; this
1513 case commonly appears in macro expansions. */
1514 if (TREE_SIDE_EFFECTS (exp))
1515 return 0;
1516 goto warn;
1518 case INDIRECT_REF:
1519 /* Don't warn about automatic dereferencing of references, since
1520 the user cannot control it. */
1521 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1523 exp = TREE_OPERAND (exp, 0);
1524 goto restart;
1526 /* Fall through. */
1528 default:
1529 /* Referencing a volatile value is a side effect, so don't warn. */
1530 if ((DECL_P (exp) || REFERENCE_CLASS_P (exp))
1531 && TREE_THIS_VOLATILE (exp))
1532 return 0;
1534 /* If this is an expression which has no operands, there is no value
1535 to be unused. There are no such language-independent codes,
1536 but front ends may define such. */
1537 if (EXPRESSION_CLASS_P (exp) && TREE_OPERAND_LENGTH (exp) == 0)
1538 return 0;
1540 warn:
1541 warning_at (locus, OPT_Wunused_value, "value computed is not used");
1542 return 1;
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  /* Jump to the function's common return point.  */
  expand_null_return_1 ();
}
1561 /* Generate RTL to return directly from the current function.
1562 (That is, we bypass any return value.) */
1564 void
1565 expand_naked_return (void)
1567 rtx end_label;
1569 clear_pending_stack_adjust ();
1570 do_pending_stack_adjust ();
1572 end_label = naked_return_label;
1573 if (end_label == 0)
1574 end_label = naked_return_label = gen_label_rtx ();
1576 emit_jump (end_label);
1579 /* Generate RTL to return from the current function, with value VAL. */
1581 static void
1582 expand_value_return (rtx val)
1584 /* Copy the value to the return location unless it's already there. */
1586 tree decl = DECL_RESULT (current_function_decl);
1587 rtx return_reg = DECL_RTL (decl);
1588 if (return_reg != val)
1590 tree funtype = TREE_TYPE (current_function_decl);
1591 tree type = TREE_TYPE (decl);
1592 int unsignedp = TYPE_UNSIGNED (type);
1593 enum machine_mode old_mode = DECL_MODE (decl);
1594 enum machine_mode mode = promote_function_mode (type, old_mode,
1595 &unsignedp, funtype, 1);
1597 if (mode != old_mode)
1598 val = convert_modes (mode, old_mode, val, unsignedp);
1600 if (GET_CODE (return_reg) == PARALLEL)
1601 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1602 else
1603 emit_move_insn (return_reg, val);
1606 expand_null_return_1 ();
1609 /* Output a return with no value. */
1611 static void
1612 expand_null_return_1 (void)
1614 clear_pending_stack_adjust ();
1615 do_pending_stack_adjust ();
1616 emit_jump (return_label);
1619 /* Generate RTL to evaluate the expression RETVAL and return it
1620 from the current function. */
/* NOTE(review): brace-only lines were lost in this rendering; the block
   structure follows the indentation comments of the original stmt.c.  */
1622 void
1623 expand_return (tree retval)
1625 rtx result_rtl;
/* VAL holds the computed return value when it must go through a
   temporary before reaching the return register.  */
1626 rtx val = 0;
1627 tree retval_rhs;
1629 /* If function wants no value, give it none. */
1630 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
1632 expand_normal (retval);
1633 expand_null_return ();
1634 return;
1637 if (retval == error_mark_node)
1639 /* Treat this like a return of no value from a function that
1640 returns a value. */
1641 expand_null_return ();
1642 return;
1644 else if ((TREE_CODE (retval) == MODIFY_EXPR
1645 || TREE_CODE (retval) == INIT_EXPR)
1646 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
1647 retval_rhs = TREE_OPERAND (retval, 1);
1648 else
1649 retval_rhs = retval;
1651 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
1653 /* If we are returning the RESULT_DECL, then the value has already
1654 been stored into it, so we don't have to do anything special. */
1655 if (TREE_CODE (retval_rhs) == RESULT_DECL)
1656 expand_value_return (result_rtl);
1658 /* If the result is an aggregate that is being returned in one (or more)
1659 registers, load the registers here. The compiler currently can't handle
1660 copying a BLKmode value into registers. We could put this code in a
1661 more general area (for use by everyone instead of just function
1662 call/return), but until this feature is generally usable it is kept here
1663 (and in expand_call). */
1665 else if (retval_rhs != 0
1666 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
1667 && REG_P (result_rtl))
1669 int i;
1670 unsigned HOST_WIDE_INT bitpos, xbitpos;
1671 unsigned HOST_WIDE_INT padding_correction = 0;
1672 unsigned HOST_WIDE_INT bytes
1673 = int_size_in_bytes (TREE_TYPE (retval_rhs));
1674 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1675 unsigned int bitsize
1676 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
1677 rtx *result_pseudos = XALLOCAVEC (rtx, n_regs);
1678 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
1679 rtx result_val = expand_normal (retval_rhs);
1680 enum machine_mode tmpmode, result_reg_mode;
/* An empty aggregate: nothing to copy, return no value.  */
1682 if (bytes == 0)
1684 expand_null_return ();
1685 return;
1688 /* If the structure doesn't take up a whole number of words, see
1689 whether the register value should be padded on the left or on
1690 the right. Set PADDING_CORRECTION to the number of padding
1691 bits needed on the left side.
1693 In most ABIs, the structure will be returned at the least end of
1694 the register, which translates to right padding on little-endian
1695 targets and left padding on big-endian targets. The opposite
1696 holds if the structure is returned at the most significant
1697 end of the register. */
1698 if (bytes % UNITS_PER_WORD != 0
1699 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
1700 ? !BYTES_BIG_ENDIAN
1701 : BYTES_BIG_ENDIAN))
1702 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
1703 * BITS_PER_UNIT));
1705 /* Copy the structure BITSIZE bits at a time. */
1706 for (bitpos = 0, xbitpos = padding_correction;
1707 bitpos < bytes * BITS_PER_UNIT;
1708 bitpos += bitsize, xbitpos += bitsize)
1710 /* We need a new destination pseudo each time xbitpos is
1711 on a word boundary and when xbitpos == padding_correction
1712 (the first time through). */
1713 if (xbitpos % BITS_PER_WORD == 0
1714 || xbitpos == padding_correction)
1716 /* Generate an appropriate register. */
1717 dst = gen_reg_rtx (word_mode);
1718 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
1720 /* Clear the destination before we move anything into it. */
1721 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
1724 /* We need a new source operand each time bitpos is on a word
1725 boundary. */
1726 if (bitpos % BITS_PER_WORD == 0)
1727 src = operand_subword_force (result_val,
1728 bitpos / BITS_PER_WORD,
1729 BLKmode);
1731 /* Use bitpos for the source extraction (left justified) and
1732 xbitpos for the destination store (right justified). */
1733 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
1734 extract_bit_field (src, bitsize,
1735 bitpos % BITS_PER_WORD, 1,
1736 NULL_RTX, word_mode, word_mode));
/* Pick an integer mode wide enough for the whole aggregate so the
   return register can be moved as a unit.  */
1739 tmpmode = GET_MODE (result_rtl);
1740 if (tmpmode == BLKmode)
1742 /* Find the smallest integer mode large enough to hold the
1743 entire structure and use that mode instead of BLKmode
1744 on the USE insn for the return register. */
1745 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1746 tmpmode != VOIDmode;
1747 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1748 /* Have we found a large enough mode? */
1749 if (GET_MODE_SIZE (tmpmode) >= bytes)
1750 break;
1752 /* A suitable mode should have been found. */
1753 gcc_assert (tmpmode != VOIDmode);
1755 PUT_MODE (result_rtl, tmpmode);
1758 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
1759 result_reg_mode = word_mode;
1760 else
1761 result_reg_mode = tmpmode;
1762 result_reg = gen_reg_rtx (result_reg_mode);
1764 for (i = 0; i < n_regs; i++)
1765 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
1766 result_pseudos[i]);
1768 if (tmpmode != result_reg_mode)
1769 result_reg = gen_lowpart (tmpmode, result_reg);
1771 expand_value_return (result_reg);
/* Scalar (or PARALLEL) return value not already in the result rtx:
   compute it into a temporary, then hand it to expand_value_return.  */
1773 else if (retval_rhs != 0
1774 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
1775 && (REG_P (result_rtl)
1776 || (GET_CODE (result_rtl) == PARALLEL)))
1778 /* Calculate the return value into a temporary (usually a pseudo
1779 reg). */
1780 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
1781 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
1783 val = assign_temp (nt, 0, 0, 1);
1784 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
1785 val = force_not_mem (val);
1786 /* Return the calculated value. */
1787 expand_value_return (val);
/* Otherwise the result lives directly in the hard return location.  */
1789 else
1791 /* No hard reg used; calculate value into hard return reg. */
1792 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
1793 expand_value_return (result_rtl);
1797 /* Emit code to restore vital registers at the beginning of a nonlocal goto
1798 handler. */
/* NOTE(review): brace-only lines were lost in this rendering; the
   conditional structure follows the original stmt.c.  */
1799 static void
1800 expand_nl_goto_receiver (void)
1802 rtx chain;
1804 /* Clobber the FP when we get here, so we have to make sure it's
1805 marked as used by this function. */
1806 emit_use (hard_frame_pointer_rtx);
1808 /* Mark the static chain as clobbered here so life information
1809 doesn't get messed up for it. */
1810 chain = targetm.calls.static_chain (current_function_decl, true);
1811 if (chain && REG_P (chain))
1812 emit_clobber (chain);
/* Only emit the frame-pointer fixup when the target has no
   nonlocal_goto pattern of its own.  */
1814 #ifdef HAVE_nonlocal_goto
1815 if (! HAVE_nonlocal_goto)
1816 #endif
1817 /* First adjust our frame pointer to its actual value. It was
1818 previously set to the start of the virtual area corresponding to
1819 the stacked variables when we branched here and now needs to be
1820 adjusted to the actual hardware fp value.
1822 Assignments are to virtual registers are converted by
1823 instantiate_virtual_regs into the corresponding assignment
1824 to the underlying register (fp in this case) that makes
1825 the original assignment true.
1826 So the following insn will actually be
1827 decrementing fp by STARTING_FRAME_OFFSET. */
1828 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
/* Restore the arg pointer only when it is a fixed register distinct
   from the hard frame pointer and cannot be eliminated.  */
1830 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1831 if (fixed_regs[ARG_POINTER_REGNUM])
1833 #ifdef ELIMINABLE_REGS
1834 /* If the argument pointer can be eliminated in favor of the
1835 frame pointer, we don't need to restore it. We assume here
1836 that if such an elimination is present, it can always be used.
1837 This is the case on all known machines; if we don't make this
1838 assumption, we do unnecessary saving on many machines. */
1839 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1840 size_t i;
1842 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1843 if (elim_regs[i].from == ARG_POINTER_REGNUM
1844 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1845 break;
1847 if (i == ARRAY_SIZE (elim_regs))
1848 #endif
1850 /* Now restore our arg pointer from the address at which it
1851 was saved in our stack frame. */
1852 emit_move_insn (crtl->args.internal_arg_pointer,
1853 copy_to_reg (get_arg_pointer_save_area ()));
1856 #endif
/* Give the target a chance to emit extra receiver code
   (NOTE(review): presumably e.g. reloading a PIC register — confirm
   against the target's nonlocal_goto_receiver pattern).  */
1858 #ifdef HAVE_nonlocal_goto_receiver
1859 if (HAVE_nonlocal_goto_receiver)
1860 emit_insn (gen_nonlocal_goto_receiver ());
1861 #endif
1863 /* We must not allow the code we just generated to be reordered by
1864 scheduling. Specifically, the update of the frame pointer must
1865 happen immediately, not later. */
1866 emit_insn (gen_blockage ());
1869 /* Generate RTL for the automatic variable declaration DECL.
1870 (Other kinds of declarations are simply ignored if seen here.) */
1872 void
1873 expand_decl (tree decl)
1875 tree type;
1877 type = TREE_TYPE (decl);
1879 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
1880 type in case this node is used in a reference. */
1881 if (TREE_CODE (decl) == CONST_DECL)
1883 DECL_MODE (decl) = TYPE_MODE (type);
1884 DECL_ALIGN (decl) = TYPE_ALIGN (type);
1885 DECL_SIZE (decl) = TYPE_SIZE (type);
1886 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
1887 return;
1890 /* Otherwise, only automatic variables need any expansion done. Static and
1891 external variables, and external functions, will be handled by
1892 `assemble_variable' (called from finish_decl). TYPE_DECL requires
1893 nothing. PARM_DECLs are handled in `assign_parms'. */
1894 if (TREE_CODE (decl) != VAR_DECL)
1895 return;
1897 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
1898 return;
1900 /* Create the RTL representation for the variable. */
1902 if (type == error_mark_node)
1903 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
1905 else if (DECL_SIZE (decl) == 0)
1907 /* Variable with incomplete type. */
1908 rtx x;
1909 if (DECL_INITIAL (decl) == 0)
1910 /* Error message was already done; now avoid a crash. */
1911 x = gen_rtx_MEM (BLKmode, const0_rtx);
1912 else
1913 /* An initializer is going to decide the size of this array.
1914 Until we know the size, represent its address with a reg. */
1915 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
1917 set_mem_attributes (x, decl, 1);
1918 SET_DECL_RTL (decl, x);
1920 else if (use_register_for_decl (decl))
1922 /* Automatic variable that can go in a register. */
1923 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1925 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
1927 /* Note if the object is a user variable. */
1928 if (!DECL_ARTIFICIAL (decl))
1929 mark_user_reg (DECL_RTL (decl));
1931 if (POINTER_TYPE_P (type))
1932 mark_reg_pointer (DECL_RTL (decl),
1933 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
1936 else
1938 rtx oldaddr = 0;
1939 rtx addr;
1940 rtx x;
1942 /* Variable-sized decls are dealt with in the gimplifier. */
1943 gcc_assert (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST);
1945 /* If we previously made RTL for this decl, it must be an array
1946 whose size was determined by the initializer.
1947 The old address was a register; set that register now
1948 to the proper address. */
1949 if (DECL_RTL_SET_P (decl))
1951 gcc_assert (MEM_P (DECL_RTL (decl)));
1952 gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0)));
1953 oldaddr = XEXP (DECL_RTL (decl), 0);
1956 /* Set alignment we actually gave this decl. */
1957 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
1958 : GET_MODE_BITSIZE (DECL_MODE (decl)));
1959 DECL_USER_ALIGN (decl) = 0;
1961 x = assign_temp (decl, 1, 1, 1);
1962 set_mem_attributes (x, decl, 1);
1963 SET_DECL_RTL (decl, x);
1965 if (oldaddr)
1967 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
1968 if (addr != oldaddr)
1969 emit_move_insn (oldaddr, addr);
1974 /* Emit code to save the current value of stack. */
1976 expand_stack_save (void)
1978 rtx ret = NULL_RTX;
1980 do_pending_stack_adjust ();
1981 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
1982 return ret;
1985 /* Emit code to restore the current value of stack. */
1986 void
1987 expand_stack_restore (tree var)
1989 rtx sa = expand_normal (var);
1991 sa = convert_memory_address (Pmode, sa);
1992 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
1995 /* Do the insertion of a case label into case_list. The labels are
1996 fed to us in descending order from the sorted vector of case labels used
1997 in the tree part of the middle end. So the list we construct is
1998 sorted in ascending order. The bounds on the case range, LOW and HIGH,
1999 are converted to case's index type TYPE. */
2001 static struct case_node *
2002 add_case_node (struct case_node *head, tree type, tree low, tree high,
2003 tree label, alloc_pool case_node_pool)
2005 tree min_value, max_value;
2006 struct case_node *r;
2008 gcc_assert (TREE_CODE (low) == INTEGER_CST);
2009 gcc_assert (!high || TREE_CODE (high) == INTEGER_CST);
2011 min_value = TYPE_MIN_VALUE (type);
2012 max_value = TYPE_MAX_VALUE (type);
2014 /* If there's no HIGH value, then this is not a case range; it's
2015 just a simple case label. But that's just a degenerate case
2016 range.
2017 If the bounds are equal, turn this into the one-value case. */
2018 if (!high || tree_int_cst_equal (low, high))
2020 /* If the simple case value is unreachable, ignore it. */
2021 if ((TREE_CODE (min_value) == INTEGER_CST
2022 && tree_int_cst_compare (low, min_value) < 0)
2023 || (TREE_CODE (max_value) == INTEGER_CST
2024 && tree_int_cst_compare (low, max_value) > 0))
2025 return head;
2026 low = fold_convert (type, low);
2027 high = low;
2029 else
2031 /* If the entire case range is unreachable, ignore it. */
2032 if ((TREE_CODE (min_value) == INTEGER_CST
2033 && tree_int_cst_compare (high, min_value) < 0)
2034 || (TREE_CODE (max_value) == INTEGER_CST
2035 && tree_int_cst_compare (low, max_value) > 0))
2036 return head;
2038 /* If the lower bound is less than the index type's minimum
2039 value, truncate the range bounds. */
2040 if (TREE_CODE (min_value) == INTEGER_CST
2041 && tree_int_cst_compare (low, min_value) < 0)
2042 low = min_value;
2043 low = fold_convert (type, low);
2045 /* If the upper bound is greater than the index type's maximum
2046 value, truncate the range bounds. */
2047 if (TREE_CODE (max_value) == INTEGER_CST
2048 && tree_int_cst_compare (high, max_value) > 0)
2049 high = max_value;
2050 high = fold_convert (type, high);
2054 /* Add this label to the chain. Make sure to drop overflow flags. */
2055 r = (struct case_node *) pool_alloc (case_node_pool);
2056 r->low = build_int_cst_wide (TREE_TYPE (low), TREE_INT_CST_LOW (low),
2057 TREE_INT_CST_HIGH (low));
2058 r->high = build_int_cst_wide (TREE_TYPE (high), TREE_INT_CST_LOW (high),
2059 TREE_INT_CST_HIGH (high));
2060 r->code_label = label;
2061 r->parent = r->left = NULL;
2062 r->right = head;
2063 return r;
2066 /* Maximum number of case bit tests. */
2067 #define MAX_CASE_BIT_TESTS 3
2069 /* By default, enable case bit tests on targets with ashlsi3. */
2070 #ifndef CASE_USE_BIT_TESTS
2071 #define CASE_USE_BIT_TESTS (optab_handler (ashl_optab, word_mode)->insn_code \
2072 != CODE_FOR_nothing)
2073 #endif
2076 /* A case_bit_test represents a set of case nodes that may be
2077 selected from using a bit-wise comparison. HI and LO hold
2078 the integer to be tested against, LABEL contains the label
2079 to jump to upon success and BITS counts the number of case
2080 nodes handled by this test, typically the number of bits
2081 set in HI:LO. */
2083 struct case_bit_test
/* High word of the case-value bit mask.  */
2085 HOST_WIDE_INT hi;
/* Low word of the case-value bit mask.  */
2086 HOST_WIDE_INT lo;
/* Branch target when (1 << index) intersects HI:LO.  */
2087 rtx label;
/* Number of case nodes folded into this test; used to sort the
   most-used label first (see case_bit_test_cmp).  */
2088 int bits;
2091 /* Determine whether "1 << x" is relatively cheap in word_mode. */
2093 static
2094 bool lshift_cheap_p (void)
2096 static bool init = false;
2097 static bool cheap = true;
2099 if (!init)
2101 rtx reg = gen_rtx_REG (word_mode, 10000);
2102 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET,
2103 optimize_insn_for_speed_p ());
2104 cheap = cost < COSTS_N_INSNS (3);
2105 init = true;
2108 return cheap;
2111 /* Comparison function for qsort to order bit tests by decreasing
2112 number of case nodes, i.e. the node with the most cases gets
2113 tested first. */
2115 static int
2116 case_bit_test_cmp (const void *p1, const void *p2)
2118 const struct case_bit_test *const d1 = (const struct case_bit_test *) p1;
2119 const struct case_bit_test *const d2 = (const struct case_bit_test *) p2;
2121 if (d2->bits != d1->bits)
2122 return d2->bits - d1->bits;
2124 /* Stabilize the sort. */
2125 return CODE_LABEL_NUMBER (d2->label) - CODE_LABEL_NUMBER (d1->label);
2128 /* Expand a switch statement by a short sequence of bit-wise
2129 comparisons. "switch(x)" is effectively converted into
2130 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
2131 integer constants.
2133 INDEX_EXPR is the value being switched on, which is of
2134 type INDEX_TYPE. MINVAL is the lowest case value of in
2135 the case nodes, of INDEX_TYPE type, and RANGE is highest
2136 value minus MINVAL, also of type INDEX_TYPE. NODES is
2137 the set of case nodes, and DEFAULT_LABEL is the label to
2138 branch to should none of the cases match.
2140 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
2141 node targets. */
2143 static void
2144 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
2145 tree range, case_node_ptr nodes, rtx default_label)
2147 struct case_bit_test test[MAX_CASE_BIT_TESTS];
2148 enum machine_mode mode;
2149 rtx expr, index, label;
2150 unsigned int i,j,lo,hi;
2151 struct case_node *n;
2152 unsigned int count;
2154 count = 0;
2155 for (n = nodes; n; n = n->right)
2157 label = label_rtx (n->code_label);
2158 for (i = 0; i < count; i++)
2159 if (label == test[i].label)
2160 break;
2162 if (i == count)
2164 gcc_assert (count < MAX_CASE_BIT_TESTS);
2165 test[i].hi = 0;
2166 test[i].lo = 0;
2167 test[i].label = label;
2168 test[i].bits = 1;
2169 count++;
2171 else
2172 test[i].bits++;
2174 lo = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2175 n->low, minval), 1);
2176 hi = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2177 n->high, minval), 1);
2178 for (j = lo; j <= hi; j++)
2179 if (j >= HOST_BITS_PER_WIDE_INT)
2180 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
2181 else
2182 test[i].lo |= (HOST_WIDE_INT) 1 << j;
2185 qsort (test, count, sizeof(*test), case_bit_test_cmp);
2187 index_expr = fold_build2 (MINUS_EXPR, index_type,
2188 fold_convert (index_type, index_expr),
2189 fold_convert (index_type, minval));
2190 index = expand_normal (index_expr);
2191 do_pending_stack_adjust ();
2193 mode = TYPE_MODE (index_type);
2194 expr = expand_normal (range);
2195 if (default_label)
2196 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
2197 default_label);
2199 index = convert_to_mode (word_mode, index, 0);
2200 index = expand_binop (word_mode, ashl_optab, const1_rtx,
2201 index, NULL_RTX, 1, OPTAB_WIDEN);
2203 for (i = 0; i < count; i++)
2205 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
2206 expr = expand_binop (word_mode, and_optab, index, expr,
2207 NULL_RTX, 1, OPTAB_WIDEN);
2208 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
2209 word_mode, 1, test[i].label);
2212 if (default_label)
2213 emit_jump (default_label);
/* Provide zero fallbacks so the "!HAVE_casesi && !HAVE_tablejump"
   test in expand_case compiles on targets defining neither pattern.  */
2216 #ifndef HAVE_casesi
2217 #define HAVE_casesi 0
2218 #endif
2220 #ifndef HAVE_tablejump
2221 #define HAVE_tablejump 0
2222 #endif
2224 /* Terminate a case (Pascal/Ada) or switch (C) statement
2225 in which ORIG_INDEX is the expression to be tested.
2226 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
2227 type as given in the source before any compiler conversions.
2228 Generate the code to test it and jump to the right place. */
2230 void
2231 expand_case (gimple stmt)
2233 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
2234 rtx default_label = 0;
2235 struct case_node *n;
2236 unsigned int count, uniq;
2237 rtx index;
2238 rtx table_label;
2239 int ncases;
2240 rtx *labelvec;
2241 int i;
2242 rtx before_case, end, lab;
2244 tree index_expr = gimple_switch_index (stmt);
2245 tree index_type = TREE_TYPE (index_expr);
2246 int unsignedp = TYPE_UNSIGNED (index_type);
2248 /* The insn after which the case dispatch should finally
2249 be emitted. Zero for a dummy. */
2250 rtx start;
2252 /* A list of case labels; it is first built as a list and it may then
2253 be rearranged into a nearly balanced binary tree. */
2254 struct case_node *case_list = 0;
2256 /* Label to jump to if no case matches. */
2257 tree default_label_decl = NULL_TREE;
/* Pool from which every case_node below is allocated; it is
   released on both return paths of this function.  */
2259 alloc_pool case_node_pool = create_alloc_pool ("struct case_node pool",
2260 sizeof (struct case_node),
2261 100);
2263 do_pending_stack_adjust ();
2265 /* An ERROR_MARK occurs for various reasons including invalid data type. */
2266 if (index_type != error_mark_node)
2268 tree elt;
2269 bitmap label_bitmap;
2270 int stopi = 0;
2272 /* cleanup_tree_cfg removes all SWITCH_EXPR with their index
2273 expressions being INTEGER_CST. */
2274 gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);
2276 /* The default case, if ever taken, is the first element. */
2277 elt = gimple_switch_label (stmt, 0);
2278 if (!CASE_LOW (elt) && !CASE_HIGH (elt))
2280 default_label_decl = CASE_LABEL (elt);
2281 stopi = 1;
2284 for (i = gimple_switch_num_labels (stmt) - 1; i >= stopi; --i)
2286 tree low, high;
2287 elt = gimple_switch_label (stmt, i);
2289 low = CASE_LOW (elt);
2290 gcc_assert (low);
2291 high = CASE_HIGH (elt);
2293 /* Discard empty ranges. */
2294 if (high && tree_int_cst_lt (high, low))
2295 continue;
2297 case_list = add_case_node (case_list, index_type, low, high,
2298 CASE_LABEL (elt), case_node_pool)
2302 before_case = start = get_last_insn ();
2303 if (default_label_decl)
2304 default_label = label_rtx (default_label_decl);
2306 /* Get upper and lower bounds of case values. */
2308 uniq = 0;
2309 count = 0;
2310 label_bitmap = BITMAP_ALLOC (NULL);
2311 for (n = case_list; n; n = n->right)
2313 /* Count the elements and track the largest and smallest
2314 of them (treating them as signed even if they are not). */
2315 if (count++ == 0)
2317 minval = n->low;
2318 maxval = n->high;
2320 else
2322 if (tree_int_cst_lt (n->low, minval))
2323 minval = n->low;
2324 if (tree_int_cst_lt (maxval, n->high))
2325 maxval = n->high;
2327 /* A range counts double, since it requires two compares. */
2328 if (! tree_int_cst_equal (n->low, n->high))
2329 count++;
2331 /* If we have not seen this label yet, then increase the
2332 number of unique case node targets seen. */
2333 lab = label_rtx (n->code_label);
2334 if (!bitmap_bit_p (label_bitmap, CODE_LABEL_NUMBER (lab)))
2336 bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab));
2337 uniq++;
2341 BITMAP_FREE (label_bitmap);
2343 /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
2344 destination, such as one with a default case only. However,
2345 it doesn't remove cases that are out of range for the switch
2346 type, so we may still get a zero here. */
2347 if (count == 0)
2349 if (default_label)
2350 emit_jump (default_label);
2351 free_alloc_pool (case_node_pool);
2352 return;
2355 /* Compute span of values. */
2356 range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);
/* Strategy 1: a few distinct targets whose values all fit within a
   machine word -- select with bit-mask tests (emit_case_bit_tests).  */
2358 /* Try implementing this switch statement by a short sequence of
2359 bit-wise comparisons. However, we let the binary-tree case
2360 below handle constant index expressions. */
2361 if (CASE_USE_BIT_TESTS
2362 && ! TREE_CONSTANT (index_expr)
2363 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
2364 && compare_tree_int (range, 0) > 0
2365 && lshift_cheap_p ()
2366 && ((uniq == 1 && count >= 3)
2367 || (uniq == 2 && count >= 5)
2368 || (uniq == 3 && count >= 6)))
2370 /* Optimize the case where all the case values fit in a
2371 word without having to subtract MINVAL. In this case,
2372 we can optimize away the subtraction. */
2373 if (compare_tree_int (minval, 0) > 0
2374 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
2376 minval = build_int_cst (index_type, 0);
2377 range = maxval;
2379 emit_case_bit_tests (index_type, index_expr, minval, range,
2380 case_list, default_label);
/* Strategy 2: a balanced binary tree of compare-and-branch insns.  */
2383 /* If range of values is much bigger than number of values,
2384 make a sequence of conditional branches instead of a dispatch.
2385 If the switch-index is a constant, do it this way
2386 because we can optimize it. */
2388 else if (count < targetm.case_values_threshold ()
2389 || compare_tree_int (range,
2390 (optimize_insn_for_size_p () ? 3 : 10) * count) > 0
2391 /* RANGE may be signed, and really large ranges will show up
2392 as negative numbers. */
2393 || compare_tree_int (range, 0) < 0
2394 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
2395 || flag_pic
2396 #endif
2397 || !flag_jump_tables
2398 || TREE_CONSTANT (index_expr)
2399 /* If neither casesi or tablejump is available, we can
2400 only go this way. */
2401 || (!HAVE_casesi && !HAVE_tablejump))
2403 index = expand_normal (index_expr);
2405 /* If the index is a short or char that we do not have
2406 an insn to handle comparisons directly, convert it to
2407 a full integer now, rather than letting each comparison
2408 generate the conversion. */
2410 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
2411 && ! have_insn_for (COMPARE, GET_MODE (index)))
2413 enum machine_mode wider_mode;
2414 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
2415 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
2416 if (have_insn_for (COMPARE, wider_mode))
2418 index = convert_to_mode (wider_mode, index, unsignedp);
2419 break;
2423 do_pending_stack_adjust ();
2425 if (MEM_P (index))
2426 index = copy_to_reg (index);
2428 /* We generate a binary decision tree to select the
2429 appropriate target code. This is done as follows:
2431 The list of cases is rearranged into a binary tree,
2432 nearly optimal assuming equal probability for each case.
2434 The tree is transformed into RTL, eliminating
2435 redundant test conditions at the same time.
2437 If program flow could reach the end of the
2438 decision tree an unconditional jump to the
2439 default code is emitted. */
2441 use_cost_table = estimate_case_costs (case_list);
2442 balance_case_nodes (&case_list, NULL);
2443 emit_case_nodes (index, case_list, default_label, index_type);
2444 if (default_label)
2445 emit_jump (default_label);
/* Strategy 3: dispatch via a casesi instruction or an address table.  */
2447 else
2449 rtx fallback_label = label_rtx (case_list->code_label);
2450 table_label = gen_label_rtx ();
2451 if (! try_casesi (index_type, index_expr, minval, range,
2452 table_label, default_label, fallback_label))
2454 bool ok;
2456 /* Index jumptables from zero for suitable values of
2457 minval to avoid a subtraction. */
2458 if (optimize_insn_for_speed_p ()
2459 && compare_tree_int (minval, 0) > 0
2460 && compare_tree_int (minval, 3) < 0)
2462 minval = build_int_cst (index_type, 0);
2463 range = maxval;
2466 ok = try_tablejump (index_type, index_expr, minval, range,
2467 table_label, default_label);
2468 gcc_assert (ok);
2471 /* Get table of labels to jump to, in order of case index. */
2473 ncases = tree_low_cst (range, 0) + 1;
2474 labelvec = XALLOCAVEC (rtx, ncases);
2475 memset (labelvec, 0, ncases * sizeof (rtx));
2477 for (n = case_list; n; n = n->right)
2479 /* Compute the low and high bounds relative to the minimum
2480 value since that should fit in a HOST_WIDE_INT while the
2481 actual values may not. */
2482 HOST_WIDE_INT i_low
2483 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2484 n->low, minval), 1);
2485 HOST_WIDE_INT i_high
2486 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2487 n->high, minval), 1);
2488 HOST_WIDE_INT i;
2490 for (i = i_low; i <= i_high; i ++)
2491 labelvec[i]
2492 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
2495 /* Fill in the gaps with the default. We may have gaps at
2496 the beginning if we tried to avoid the minval subtraction,
2497 so substitute some label even if the default label was
2498 deemed unreachable. */
2499 if (!default_label)
2500 default_label = fallback_label;
2501 for (i = 0; i < ncases; i++)
2502 if (labelvec[i] == 0)
2503 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
2505 /* Output the table. */
2506 emit_label (table_label);
2508 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
2509 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
2510 gen_rtx_LABEL_REF (Pmode, table_label),
2511 gen_rtvec_v (ncases, labelvec),
2512 const0_rtx, const0_rtx));
2513 else
2514 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
2515 gen_rtvec_v (ncases, labelvec)));
2517 /* Record no drop-through after the table. */
2518 emit_barrier ();
2521 before_case = NEXT_INSN (before_case);
2522 end = get_last_insn ();
2523 reorder_insns (before_case, end, start);
2526 free_temp_slots ();
2527 free_alloc_pool (case_node_pool);
2530 /* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE. */
2532 static void
2533 do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
2534 int unsignedp)
2536 do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
2537 NULL_RTX, NULL_RTX, label);
2540 /* Not all case values are encountered equally. This function
2541 uses a heuristic to weight case labels, in cases where that
2542 looks like a reasonable thing to do.
2544 Right now, all we try to guess is text, and we establish the
2545 following weights:
2547 chars above space: 16
2548 digits: 16
2549 default: 12
2550 space, punct: 8
2551 tab: 4
2552 newline: 2
2553 other "\" chars: 1
2554 remaining chars: 0
2556 If we find any cases in the switch that are not either -1 or in the range
2557 of valid ASCII characters, or are control characters other than those
2558 commonly used with "\", don't treat this switch scanning text.
2560 Return 1 if these nodes are suitable for cost estimation, otherwise
2561 return 0. */
2563 static int
2564 estimate_case_costs (case_node_ptr node)
2566 tree min_ascii = integer_minus_one_node;
2567 tree max_ascii = build_int_cst (TREE_TYPE (node->high), 127);
2568 case_node_ptr n;
2569 int i;
2571 /* If we haven't already made the cost table, make it now. Note that the
2572 lower bound of the table is -1, not zero. */
2574 if (! cost_table_initialized)
2576 cost_table_initialized = 1;
2578 for (i = 0; i < 128; i++)
2580 if (ISALNUM (i))
2581 COST_TABLE (i) = 16;
2582 else if (ISPUNCT (i))
2583 COST_TABLE (i) = 8;
2584 else if (ISCNTRL (i))
2585 COST_TABLE (i) = -1;
2588 COST_TABLE (' ') = 8;
2589 COST_TABLE ('\t') = 4;
2590 COST_TABLE ('\0') = 4;
2591 COST_TABLE ('\n') = 2;
2592 COST_TABLE ('\f') = 1;
2593 COST_TABLE ('\v') = 1;
2594 COST_TABLE ('\b') = 1;
2597 /* See if all the case expressions look like text. It is text if the
2598 constant is >= -1 and the highest constant is <= 127. Do all comparisons
2599 as signed arithmetic since we don't want to ever access cost_table with a
2600 value less than -1. Also check that none of the constants in a range
2601 are strange control characters. */
2603 for (n = node; n; n = n->right)
2605 if (tree_int_cst_lt (n->low, min_ascii)
2606 || tree_int_cst_lt (max_ascii, n->high))
2607 return 0;
2609 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
2610 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
2611 if (COST_TABLE (i) < 0)
2612 return 0;
2615 /* All interesting values are within the range of interesting
2616 ASCII characters. */
2617 return 1;
2620 /* Take an ordered list of case nodes
2621 and transform them into a near optimal binary tree,
2622 on the assumption that any target code selection value is as
2623 likely as any other.
2625 The transformation is performed by splitting the ordered
2626 list into two equal sections plus a pivot. The parts are
2627 then attached to the pivot as left and right branches. Each
2628 branch is then transformed recursively. */
2630 static void
2631 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
2633 case_node_ptr np;
2635 np = *head;
2636 if (np)
2638 int cost = 0;
2639 int i = 0;
2640 int ranges = 0;
2641 case_node_ptr *npp;
2642 case_node_ptr left;
2644 /* Count the number of entries on branch. Also count the ranges. */
2646 while (np)
2648 if (!tree_int_cst_equal (np->low, np->high))
2650 ranges++;
2651 if (use_cost_table)
2652 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
2655 if (use_cost_table)
2656 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
2658 i++;
2659 np = np->right;
2662 if (i > 2)
2664 /* Split this list if it is long enough for that to help. */
2665 npp = head;
2666 left = *npp;
2667 if (use_cost_table)
2669 /* Find the place in the list that bisects the list's total cost,
2670 Here I gets half the total cost. */
2671 int n_moved = 0;
2672 i = (cost + 1) / 2;
2673 while (1)
2675 /* Skip nodes while their cost does not reach that amount. */
2676 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2677 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
2678 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
2679 if (i <= 0)
2680 break;
2681 npp = &(*npp)->right;
2682 n_moved += 1;
2684 if (n_moved == 0)
2686 /* Leave this branch lopsided, but optimize left-hand
2687 side and fill in `parent' fields for right-hand side. */
2688 np = *head;
2689 np->parent = parent;
2690 balance_case_nodes (&np->left, np);
2691 for (; np->right; np = np->right)
2692 np->right->parent = np;
2693 return;
2696 /* If there are just three nodes, split at the middle one. */
2697 else if (i == 3)
2698 npp = &(*npp)->right;
2699 else
2701 /* Find the place in the list that bisects the list's total cost,
2702 where ranges count as 2.
2703 Here I gets half the total cost. */
2704 i = (i + ranges + 1) / 2;
2705 while (1)
2707 /* Skip nodes while their cost does not reach that amount. */
2708 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2709 i--;
2710 i--;
2711 if (i <= 0)
2712 break;
2713 npp = &(*npp)->right;
/* The node at *npp becomes the pivot: detach it from the chain, hang
   the first half (LEFT) on its left, and keep the remainder as its
   right branch; then balance both halves recursively.  */
2716 *head = np = *npp;
2717 *npp = 0;
2718 np->parent = parent;
2719 np->left = left;
2721 /* Optimize each of the two split parts. */
2722 balance_case_nodes (&np->left, np);
2723 balance_case_nodes (&np->right, np);
2725 else
2727 /* Else leave this branch as one level,
2728 but fill in `parent' fields. */
2729 np = *head;
2730 np->parent = parent;
2731 for (; np->right; np = np->right)
2732 np->right->parent = np;
2737 /* Search the parent sections of the case node tree
2738 to see if a test for the lower bound of NODE would be redundant.
2739 INDEX_TYPE is the type of the index expression.
2741 The instructions to generate the case decision tree are
2742 output in the same order as nodes are processed so it is
2743 known that if a parent node checks the range of the current
2744 node minus one that the current node is bounded at its lower
2745 span. Thus the test would be redundant. */
2747 static int
2748 node_has_low_bound (case_node_ptr node, tree index_type)
2750 tree low_minus_one;
2751 case_node_ptr pnode;
2753 /* If the lower bound of this node is the lowest value in the index type,
2754 we need not test it. */
2756 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
2757 return 1;
2759 /* If this node has a left branch, the value at the left must be less
2760 than that at this node, so it cannot be bounded at the bottom and
2761 we need not bother testing any further. */
2763 if (node->left)
2764 return 0;
2766 low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
2767 node->low,
2768 build_int_cst (TREE_TYPE (node->low), 1));
2770 /* If the subtraction above overflowed, we can't verify anything.
2771 Otherwise, look for a parent that tests our value - 1. */
2773 if (! tree_int_cst_lt (low_minus_one, node->low))
2774 return 0;
2776 for (pnode = node->parent; pnode; pnode = pnode->parent)
2777 if (tree_int_cst_equal (low_minus_one, pnode->high))
2778 return 1;
2780 return 0;
2783 /* Search the parent sections of the case node tree
2784 to see if a test for the upper bound of NODE would be redundant.
2785 INDEX_TYPE is the type of the index expression.
2787 The instructions to generate the case decision tree are
2788 output in the same order as nodes are processed so it is
2789 known that if a parent node checks the range of the current
2790 node plus one that the current node is bounded at its upper
2791 span. Thus the test would be redundant. */
2793 static int
2794 node_has_high_bound (case_node_ptr node, tree index_type)
2796 tree high_plus_one;
2797 case_node_ptr pnode;
2799 /* If there is no upper bound, obviously no test is needed. */
2801 if (TYPE_MAX_VALUE (index_type) == NULL)
2802 return 1;
2804 /* If the upper bound of this node is the highest value in the type
2805 of the index expression, we need not test against it. */
2807 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
2808 return 1;
2810 /* If this node has a right branch, the value at the right must be greater
2811 than that at this node, so it cannot be bounded at the top and
2812 we need not bother testing any further. */
2814 if (node->right)
2815 return 0;
2817 high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
2818 node->high,
2819 build_int_cst (TREE_TYPE (node->high), 1));
2821 /* If the addition above overflowed, we can't verify anything.
2822 Otherwise, look for a parent that tests our value + 1. */
2824 if (! tree_int_cst_lt (node->high, high_plus_one))
2825 return 0;
2827 for (pnode = node->parent; pnode; pnode = pnode->parent)
2828 if (tree_int_cst_equal (high_plus_one, pnode->low))
2829 return 1;
2831 return 0;
2834 /* Search the parent sections of the
2835 case node tree to see if both tests for the upper and lower
2836 bounds of NODE would be redundant. */
2838 static int
2839 node_is_bounded (case_node_ptr node, tree index_type)
2841 return (node_has_low_bound (node, index_type)
2842 && node_has_high_bound (node, index_type));
2845 /* Emit step-by-step code to select a case for the value of INDEX.
2846 The thus generated decision tree follows the form of the
2847 case-node binary tree NODE, whose nodes represent test conditions.
2848 INDEX_TYPE is the type of the index of the switch.
2850 Care is taken to prune redundant tests from the decision tree
2851 by detecting any boundary conditions already checked by
2852 emitted rtx. (See node_has_high_bound, node_has_low_bound
2853 and node_is_bounded, above.)
2855 Where the test conditions can be shown to be redundant we emit
2856 an unconditional jump to the target code. As a further
2857 optimization, the subordinates of a tree node are examined to
2858 check for bounded nodes. In this case conditional and/or
2859 unconditional jumps as a result of the boundary check for the
2860 current node are arranged to target the subordinates associated
2861 code for out of bound conditions on the current node.
2863 We can assume that when control reaches the code generated here,
2864 the index value has already been compared with the parents
2865 of this node, and determined to be on the same side of each parent
2866 as this node is. Thus, if this node tests for the value 51,
2867 and a parent tested for 52, we don't need to consider
2868 the possibility of a value greater than 51. If another parent
2869 tests for the value 50, then this node need not test anything. */
/* NOTE(review): DEFAULT_LABEL may be zero when the default case is
   unreachable; the unconditional emit_jump calls below are guarded
   against that.  */
2871 static void
2872 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
2873 tree index_type)
2875 /* If INDEX has an unsigned type, we must make unsigned branches. */
2876 int unsignedp = TYPE_UNSIGNED (index_type);
2877 enum machine_mode mode = GET_MODE (index);
2878 enum machine_mode imode = TYPE_MODE (index_type);
2880 /* Handle indices detected as constant during RTL expansion. */
2881 if (mode == VOIDmode)
2882 mode = imode;
2884 /* See if our parents have already tested everything for us.
2885 If they have, emit an unconditional jump for this node. */
2886 if (node_is_bounded (node, index_type))
2887 emit_jump (label_rtx (node->code_label));
2889 else if (tree_int_cst_equal (node->low, node->high))
2891 /* Node is single valued. First see if the index expression matches
2892 this node and then check our children, if any. */
2894 do_jump_if_equal (mode, index,
2895 convert_modes (mode, imode,
2896 expand_normal (node->low),
2897 unsignedp),
2898 label_rtx (node->code_label), unsignedp);
2900 if (node->right != 0 && node->left != 0)
2902 /* This node has children on both sides.
2903 Dispatch to one side or the other
2904 by comparing the index value with this node's value.
2905 If one subtree is bounded, check that one first,
2906 so we can avoid real branches in the tree. */
2908 if (node_is_bounded (node->right, index_type))
2910 emit_cmp_and_jump_insns (index,
2911 convert_modes
2912 (mode, imode,
2913 expand_normal (node->high),
2914 unsignedp),
2915 GT, NULL_RTX, mode, unsignedp,
2916 label_rtx (node->right->code_label));
2917 emit_case_nodes (index, node->left, default_label, index_type);
2920 else if (node_is_bounded (node->left, index_type))
2922 emit_cmp_and_jump_insns (index,
2923 convert_modes
2924 (mode, imode,
2925 expand_normal (node->high),
2926 unsignedp),
2927 LT, NULL_RTX, mode, unsignedp,
2928 label_rtx (node->left->code_label));
2929 emit_case_nodes (index, node->right, default_label, index_type);
2932 /* If both children are single-valued cases with no
2933 children, finish up all the work. This way, we can save
2934 one ordered comparison. */
2935 else if (tree_int_cst_equal (node->right->low, node->right->high)
2936 && node->right->left == 0
2937 && node->right->right == 0
2938 && tree_int_cst_equal (node->left->low, node->left->high)
2939 && node->left->left == 0
2940 && node->left->right == 0)
2942 /* Neither node is bounded. First distinguish the two sides;
2943 then emit the code for one side at a time. */
2945 /* See if the value matches what the right hand side
2946 wants. */
2947 do_jump_if_equal (mode, index,
2948 convert_modes (mode, imode,
2949 expand_normal (node->right->low),
2950 unsignedp),
2951 label_rtx (node->right->code_label),
2952 unsignedp);
2954 /* See if the value matches what the left hand side
2955 wants. */
2956 do_jump_if_equal (mode, index,
2957 convert_modes (mode, imode,
2958 expand_normal (node->left->low),
2959 unsignedp),
2960 label_rtx (node->left->code_label),
2961 unsignedp);
2964 else
2966 /* Neither node is bounded. First distinguish the two sides;
2967 then emit the code for one side at a time. */
2969 tree test_label
2970 = build_decl (CURR_INSN_LOCATION,
2971 LABEL_DECL, NULL_TREE, NULL_TREE);
2973 /* See if the value is on the right. */
2974 emit_cmp_and_jump_insns (index,
2975 convert_modes
2976 (mode, imode,
2977 expand_normal (node->high),
2978 unsignedp),
2979 GT, NULL_RTX, mode, unsignedp,
2980 label_rtx (test_label));
2982 /* Value must be on the left.
2983 Handle the left-hand subtree. */
2984 emit_case_nodes (index, node->left, default_label, index_type);
2985 /* If left-hand subtree does nothing,
2986 go to default. */
2987 if (default_label)
2988 emit_jump (default_label);
2990 /* Code branches here for the right-hand subtree. */
2991 expand_label (test_label);
2992 emit_case_nodes (index, node->right, default_label, index_type);
2996 else if (node->right != 0 && node->left == 0)
2998 /* Here we have a right child but no left so we issue a conditional
2999 branch to default and process the right child.
3001 Omit the conditional branch to default if the right child
3002 does not have any children and is single valued; it would
3003 cost too much space to save so little time. */
3005 if (node->right->right || node->right->left
3006 || !tree_int_cst_equal (node->right->low, node->right->high))
3008 if (!node_has_low_bound (node, index_type))
3010 emit_cmp_and_jump_insns (index,
3011 convert_modes
3012 (mode, imode,
3013 expand_normal (node->high),
3014 unsignedp),
3015 LT, NULL_RTX, mode, unsignedp,
3016 default_label);
3019 emit_case_nodes (index, node->right, default_label, index_type);
3021 else
3022 /* We cannot process node->right normally
3023 since we haven't ruled out the numbers less than
3024 this node's value. So handle node->right explicitly. */
3025 do_jump_if_equal (mode, index,
3026 convert_modes
3027 (mode, imode,
3028 expand_normal (node->right->low),
3029 unsignedp),
3030 label_rtx (node->right->code_label), unsignedp);
3033 else if (node->right == 0 && node->left != 0)
3035 /* Just one subtree, on the left. */
3036 if (node->left->left || node->left->right
3037 || !tree_int_cst_equal (node->left->low, node->left->high))
3039 if (!node_has_high_bound (node, index_type))
3041 emit_cmp_and_jump_insns (index,
3042 convert_modes
3043 (mode, imode,
3044 expand_normal (node->high),
3045 unsignedp),
3046 GT, NULL_RTX, mode, unsignedp,
3047 default_label);
3050 emit_case_nodes (index, node->left, default_label, index_type);
3052 else
3053 /* We cannot process node->left normally
3054 since we haven't ruled out the numbers greater than
3055 this node's value. So handle node->left explicitly. */
3056 do_jump_if_equal (mode, index,
3057 convert_modes
3058 (mode, imode,
3059 expand_normal (node->left->low),
3060 unsignedp),
3061 label_rtx (node->left->code_label), unsignedp);
3064 else
3066 /* Node is a range. These cases are very similar to those for a single
3067 value, except that we do not start by testing whether this node
3068 is the one to branch to. */
3070 if (node->right != 0 && node->left != 0)
3072 /* Node has subtrees on both sides.
3073 If the right-hand subtree is bounded,
3074 test for it first, since we can go straight there.
3075 Otherwise, we need to make a branch in the control structure,
3076 then handle the two subtrees. */
/* Set to a fresh label when the right subtree must be expanded after
   the left one; stays zero when we could branch to it directly.  */
3077 tree test_label = 0;
3079 if (node_is_bounded (node->right, index_type))
3080 /* Right hand node is fully bounded so we can eliminate any
3081 testing and branch directly to the target code. */
3082 emit_cmp_and_jump_insns (index,
3083 convert_modes
3084 (mode, imode,
3085 expand_normal (node->high),
3086 unsignedp),
3087 GT, NULL_RTX, mode, unsignedp,
3088 label_rtx (node->right->code_label));
3089 else
3091 /* Right hand node requires testing.
3092 Branch to a label where we will handle it later. */
3094 test_label = build_decl (CURR_INSN_LOCATION,
3095 LABEL_DECL, NULL_TREE, NULL_TREE);
3096 emit_cmp_and_jump_insns (index,
3097 convert_modes
3098 (mode, imode,
3099 expand_normal (node->high),
3100 unsignedp),
3101 GT, NULL_RTX, mode, unsignedp,
3102 label_rtx (test_label));
3105 /* Value belongs to this node or to the left-hand subtree. */
3107 emit_cmp_and_jump_insns (index,
3108 convert_modes
3109 (mode, imode,
3110 expand_normal (node->low),
3111 unsignedp),
3112 GE, NULL_RTX, mode, unsignedp,
3113 label_rtx (node->code_label));
3115 /* Handle the left-hand subtree. */
3116 emit_case_nodes (index, node->left, default_label, index_type);
3118 /* If right node had to be handled later, do that now. */
3120 if (test_label)
3122 /* If the left-hand subtree fell through,
3123 don't let it fall into the right-hand subtree. */
3124 if (default_label)
3125 emit_jump (default_label);
3127 expand_label (test_label);
3128 emit_case_nodes (index, node->right, default_label, index_type);
3132 else if (node->right != 0 && node->left == 0)
3134 /* Deal with values to the left of this node,
3135 if they are possible. */
3136 if (!node_has_low_bound (node, index_type))
3138 emit_cmp_and_jump_insns (index,
3139 convert_modes
3140 (mode, imode,
3141 expand_normal (node->low),
3142 unsignedp),
3143 LT, NULL_RTX, mode, unsignedp,
3144 default_label);
3147 /* Value belongs to this node or to the right-hand subtree. */
3149 emit_cmp_and_jump_insns (index,
3150 convert_modes
3151 (mode, imode,
3152 expand_normal (node->high),
3153 unsignedp),
3154 LE, NULL_RTX, mode, unsignedp,
3155 label_rtx (node->code_label));
3157 emit_case_nodes (index, node->right, default_label, index_type);
3160 else if (node->right == 0 && node->left != 0)
3162 /* Deal with values to the right of this node,
3163 if they are possible. */
3164 if (!node_has_high_bound (node, index_type))
3166 emit_cmp_and_jump_insns (index,
3167 convert_modes
3168 (mode, imode,
3169 expand_normal (node->high),
3170 unsignedp),
3171 GT, NULL_RTX, mode, unsignedp,
3172 default_label);
3175 /* Value belongs to this node or to the left-hand subtree. */
3177 emit_cmp_and_jump_insns (index,
3178 convert_modes
3179 (mode, imode,
3180 expand_normal (node->low),
3181 unsignedp),
3182 GE, NULL_RTX, mode, unsignedp,
3183 label_rtx (node->code_label));
3185 emit_case_nodes (index, node->left, default_label, index_type);
3188 else
3190 /* Node has no children so we check low and high bounds to remove
3191 redundant tests. Only one of the bounds can exist,
3192 since otherwise this node is bounded--a case tested already. */
3193 int high_bound = node_has_high_bound (node, index_type);
3194 int low_bound = node_has_low_bound (node, index_type);
3196 if (!high_bound && low_bound)
3198 emit_cmp_and_jump_insns (index,
3199 convert_modes
3200 (mode, imode,
3201 expand_normal (node->high),
3202 unsignedp),
3203 GT, NULL_RTX, mode, unsignedp,
3204 default_label);
3207 else if (!low_bound && high_bound)
3209 emit_cmp_and_jump_insns (index,
3210 convert_modes
3211 (mode, imode,
3212 expand_normal (node->low),
3213 unsignedp),
3214 LT, NULL_RTX, mode, unsignedp,
3215 default_label);
3217 else if (!low_bound && !high_bound)
3219 /* Widen LOW and HIGH to the same width as INDEX. */
3220 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
3221 tree low = build1 (CONVERT_EXPR, type, node->low);
3222 tree high = build1 (CONVERT_EXPR, type, node->high);
3223 rtx low_rtx, new_index, new_bound;
3225 /* Instead of doing two branches, emit one unsigned branch for
3226 (index-low) > (high-low). */
3227 low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
3228 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
3229 NULL_RTX, unsignedp,
3230 OPTAB_WIDEN);
3231 new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
3232 high, low),
3233 NULL_RTX, mode, EXPAND_NORMAL);
3235 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
3236 mode, 1, default_label);
3239 emit_jump (label_rtx (node->code_label));