Enable dumping of alias graphs.
[official-gcc/Ramakrishna.git] / gcc / stmt.c
blob23fdd08dd302f0ef96ce8360d9a71430503d348e
/* Expands front end tree to back end RTL for GCC
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   The functions whose names start with `expand_' are called by the
   expander to generate RTL instructions for various kinds of constructs.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "hard-reg-set.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"
#include "optabs.h"
#include "target.h"
#include "gimple.h"
#include "regs.h"
#include "alloc-pool.h"
#include "pretty-print.h"
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   We start with a vector of case nodes sorted in ascending order, and
   the default label as the last element in the vector.  Before expanding
   to RTL, we transform this vector into a list linked via the RIGHT
   fields in the case_node struct.  Nodes with higher case values are
   later in the list.

   Switch statements can be output in three forms.  A branch table is
   used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.

   For very small, suitable switch statements, we can generate a series
   of simple bit test and branches instead.  */
85 struct case_node
87 struct case_node *left; /* Left son in binary tree */
88 struct case_node *right; /* Right son in binary tree; also node chain */
89 struct case_node *parent; /* Parent of node in binary tree */
90 tree low; /* Lowest index value for this label */
91 tree high; /* Highest index value for this label */
92 tree code_label; /* Label to jump to when node matches */
95 typedef struct case_node case_node;
96 typedef struct case_node *case_node_ptr;
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  The +1 bias maps case value -1 to index 0.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
109 static int n_occurrences (int, const char *);
110 static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
111 static void expand_nl_goto_receiver (void);
112 static bool check_operand_nalternatives (tree, tree);
113 static bool check_unique_operand_names (tree, tree);
114 static char *resolve_operand_name_1 (char *, tree, tree);
115 static void expand_null_return_1 (void);
116 static void expand_value_return (rtx);
117 static int estimate_case_costs (case_node_ptr);
118 static bool lshift_cheap_p (void);
119 static int case_bit_test_cmp (const void *, const void *);
120 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
121 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
122 static int node_has_low_bound (case_node_ptr, tree);
123 static int node_has_high_bound (case_node_ptr, tree);
124 static int node_is_bounded (case_node_ptr, tree);
125 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
126 static struct case_node *add_case_node (struct case_node *, tree,
127 tree, tree, tree, alloc_pool);
130 /* Return the rtx-label that corresponds to a LABEL_DECL,
131 creating it if necessary. */
134 label_rtx (tree label)
136 gcc_assert (TREE_CODE (label) == LABEL_DECL);
138 if (!DECL_RTL_SET_P (label))
140 rtx r = gen_label_rtx ();
141 SET_DECL_RTL (label, r);
142 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
143 LABEL_PRESERVE_P (r) = 1;
146 return DECL_RTL (label);
149 /* As above, but also put it on the forced-reference list of the
150 function that contains it. */
152 force_label_rtx (tree label)
154 rtx ref = label_rtx (label);
155 tree function = decl_function_context (label);
157 gcc_assert (function);
159 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref, forced_labels);
160 return ref;
163 /* Add an unconditional jump to LABEL as the next sequential instruction. */
165 void
166 emit_jump (rtx label)
168 do_pending_stack_adjust ();
169 emit_jump_insn (gen_jump (label));
170 emit_barrier ();
173 /* Emit code to jump to the address
174 specified by the pointer expression EXP. */
176 void
177 expand_computed_goto (tree exp)
179 rtx x = expand_normal (exp);
181 x = convert_memory_address (Pmode, x);
183 do_pending_stack_adjust ();
184 emit_indirect_jump (x);
187 /* Handle goto statements and the labels that they can go to. */
189 /* Specify the location in the RTL code of a label LABEL,
190 which is a LABEL_DECL tree node.
192 This is used for the kind of label that the user can jump to with a
193 goto statement, and for alternatives of a switch or case statement.
194 RTL labels generated for loops and conditionals don't go through here;
195 they are generated directly at the RTL level, by other functions below.
197 Note that this has nothing to do with defining label *names*.
198 Languages vary in how they do that and what that even means. */
200 void
201 expand_label (tree label)
203 rtx label_r = label_rtx (label);
205 do_pending_stack_adjust ();
206 emit_label (label_r);
207 if (DECL_NAME (label))
208 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
210 if (DECL_NONLOCAL (label))
212 expand_nl_goto_receiver ();
213 nonlocal_goto_handler_labels
214 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
215 nonlocal_goto_handler_labels);
218 if (FORCED_LABEL (label))
219 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
221 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
222 maybe_set_first_label_num (label_r);
225 /* Generate RTL code for a `goto' statement with target label LABEL.
226 LABEL should be a LABEL_DECL tree node that was or will later be
227 defined with `expand_label'. */
229 void
230 expand_goto (tree label)
232 #ifdef ENABLE_CHECKING
233 /* Check for a nonlocal goto to a containing function. Should have
234 gotten translated to __builtin_nonlocal_goto. */
235 tree context = decl_function_context (label);
236 gcc_assert (!context || context == current_function_decl);
237 #endif
239 emit_jump (label_rtx (label));
/* Return the number of times character C occurs in string S.
   S must be a NUL-terminated string; C is compared as an int, the
   same convention used by strchr.  */
static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
252 /* Generate RTL for an asm statement (explicit assembler code).
253 STRING is a STRING_CST node containing the assembler code text,
254 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
255 insn is volatile; don't optimize it. */
257 static void
258 expand_asm_loc (tree string, int vol, location_t locus)
260 rtx body;
262 if (TREE_CODE (string) == ADDR_EXPR)
263 string = TREE_OPERAND (string, 0);
265 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
266 ggc_strdup (TREE_STRING_POINTER (string)),
267 locus);
269 MEM_VOLATILE_P (body) = vol;
271 emit_insn (body);
274 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
275 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
276 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
277 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
278 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
279 constraint allows the use of a register operand. And, *IS_INOUT
280 will be true if the operand is read-write, i.e., if it is used as
281 an input as well as an output. If *CONSTRAINT_P is not in
282 canonical form, it will be made canonical. (Note that `+' will be
283 replaced with `=' as part of this process.)
285 Returns TRUE if all went well; FALSE if an error occurred. */
287 bool
288 parse_output_constraint (const char **constraint_p, int operand_num,
289 int ninputs, int noutputs, bool *allows_mem,
290 bool *allows_reg, bool *is_inout)
292 const char *constraint = *constraint_p;
293 const char *p;
295 /* Assume the constraint doesn't allow the use of either a register
296 or memory. */
297 *allows_mem = false;
298 *allows_reg = false;
300 /* Allow the `=' or `+' to not be at the beginning of the string,
301 since it wasn't explicitly documented that way, and there is a
302 large body of code that puts it last. Swap the character to
303 the front, so as not to uglify any place else. */
304 p = strchr (constraint, '=');
305 if (!p)
306 p = strchr (constraint, '+');
308 /* If the string doesn't contain an `=', issue an error
309 message. */
310 if (!p)
312 error ("output operand constraint lacks %<=%>");
313 return false;
316 /* If the constraint begins with `+', then the operand is both read
317 from and written to. */
318 *is_inout = (*p == '+');
320 /* Canonicalize the output constraint so that it begins with `='. */
321 if (p != constraint || *is_inout)
323 char *buf;
324 size_t c_len = strlen (constraint);
326 if (p != constraint)
327 warning (0, "output constraint %qc for operand %d "
328 "is not at the beginning",
329 *p, operand_num);
331 /* Make a copy of the constraint. */
332 buf = XALLOCAVEC (char, c_len + 1);
333 strcpy (buf, constraint);
334 /* Swap the first character and the `=' or `+'. */
335 buf[p - constraint] = buf[0];
336 /* Make sure the first character is an `='. (Until we do this,
337 it might be a `+'.) */
338 buf[0] = '=';
339 /* Replace the constraint with the canonicalized string. */
340 *constraint_p = ggc_alloc_string (buf, c_len);
341 constraint = *constraint_p;
344 /* Loop through the constraint string. */
345 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
346 switch (*p)
348 case '+':
349 case '=':
350 error ("operand constraint contains incorrectly positioned "
351 "%<+%> or %<=%>");
352 return false;
354 case '%':
355 if (operand_num + 1 == ninputs + noutputs)
357 error ("%<%%%> constraint used with last operand");
358 return false;
360 break;
362 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
363 *allows_mem = true;
364 break;
366 case '?': case '!': case '*': case '&': case '#':
367 case 'E': case 'F': case 'G': case 'H':
368 case 's': case 'i': case 'n':
369 case 'I': case 'J': case 'K': case 'L': case 'M':
370 case 'N': case 'O': case 'P': case ',':
371 break;
373 case '0': case '1': case '2': case '3': case '4':
374 case '5': case '6': case '7': case '8': case '9':
375 case '[':
376 error ("matching constraint not valid in output operand");
377 return false;
379 case '<': case '>':
380 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
381 excepting those that expand_call created. So match memory
382 and hope. */
383 *allows_mem = true;
384 break;
386 case 'g': case 'X':
387 *allows_reg = true;
388 *allows_mem = true;
389 break;
391 case 'p': case 'r':
392 *allows_reg = true;
393 break;
395 default:
396 if (!ISALPHA (*p))
397 break;
398 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
399 *allows_reg = true;
400 #ifdef EXTRA_CONSTRAINT_STR
401 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
402 *allows_reg = true;
403 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
404 *allows_mem = true;
405 else
407 /* Otherwise we can't assume anything about the nature of
408 the constraint except that it isn't purely registers.
409 Treat it like "g" and hope for the best. */
410 *allows_reg = true;
411 *allows_mem = true;
413 #endif
414 break;
417 return true;
420 /* Similar, but for input constraints. */
422 bool
423 parse_input_constraint (const char **constraint_p, int input_num,
424 int ninputs, int noutputs, int ninout,
425 const char * const * constraints,
426 bool *allows_mem, bool *allows_reg)
428 const char *constraint = *constraint_p;
429 const char *orig_constraint = constraint;
430 size_t c_len = strlen (constraint);
431 size_t j;
432 bool saw_match = false;
434 /* Assume the constraint doesn't allow the use of either
435 a register or memory. */
436 *allows_mem = false;
437 *allows_reg = false;
439 /* Make sure constraint has neither `=', `+', nor '&'. */
441 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
442 switch (constraint[j])
444 case '+': case '=': case '&':
445 if (constraint == orig_constraint)
447 error ("input operand constraint contains %qc", constraint[j]);
448 return false;
450 break;
452 case '%':
453 if (constraint == orig_constraint
454 && input_num + 1 == ninputs - ninout)
456 error ("%<%%%> constraint used with last operand");
457 return false;
459 break;
461 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
462 *allows_mem = true;
463 break;
465 case '<': case '>':
466 case '?': case '!': case '*': case '#':
467 case 'E': case 'F': case 'G': case 'H':
468 case 's': case 'i': case 'n':
469 case 'I': case 'J': case 'K': case 'L': case 'M':
470 case 'N': case 'O': case 'P': case ',':
471 break;
473 /* Whether or not a numeric constraint allows a register is
474 decided by the matching constraint, and so there is no need
475 to do anything special with them. We must handle them in
476 the default case, so that we don't unnecessarily force
477 operands to memory. */
478 case '0': case '1': case '2': case '3': case '4':
479 case '5': case '6': case '7': case '8': case '9':
481 char *end;
482 unsigned long match;
484 saw_match = true;
486 match = strtoul (constraint + j, &end, 10);
487 if (match >= (unsigned long) noutputs)
489 error ("matching constraint references invalid operand number");
490 return false;
493 /* Try and find the real constraint for this dup. Only do this
494 if the matching constraint is the only alternative. */
495 if (*end == '\0'
496 && (j == 0 || (j == 1 && constraint[0] == '%')))
498 constraint = constraints[match];
499 *constraint_p = constraint;
500 c_len = strlen (constraint);
501 j = 0;
502 /* ??? At the end of the loop, we will skip the first part of
503 the matched constraint. This assumes not only that the
504 other constraint is an output constraint, but also that
505 the '=' or '+' come first. */
506 break;
508 else
509 j = end - constraint;
510 /* Anticipate increment at end of loop. */
511 j--;
513 /* Fall through. */
515 case 'p': case 'r':
516 *allows_reg = true;
517 break;
519 case 'g': case 'X':
520 *allows_reg = true;
521 *allows_mem = true;
522 break;
524 default:
525 if (! ISALPHA (constraint[j]))
527 error ("invalid punctuation %qc in constraint", constraint[j]);
528 return false;
530 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
531 != NO_REGS)
532 *allows_reg = true;
533 #ifdef EXTRA_CONSTRAINT_STR
534 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
535 *allows_reg = true;
536 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
537 *allows_mem = true;
538 else
540 /* Otherwise we can't assume anything about the nature of
541 the constraint except that it isn't purely registers.
542 Treat it like "g" and hope for the best. */
543 *allows_reg = true;
544 *allows_mem = true;
546 #endif
547 break;
550 if (saw_match && !*allows_reg)
551 warning (0, "matching constraint does not allow a register");
553 return true;
556 /* Return DECL iff there's an overlap between *REGS and DECL, where DECL
557 can be an asm-declared register. Called via walk_tree. */
559 static tree
560 decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED,
561 void *data)
563 tree decl = *declp;
564 const HARD_REG_SET *const regs = (const HARD_REG_SET *) data;
566 if (TREE_CODE (decl) == VAR_DECL)
568 if (DECL_HARD_REGISTER (decl)
569 && REG_P (DECL_RTL (decl))
570 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
572 rtx reg = DECL_RTL (decl);
574 if (overlaps_hard_reg_set_p (*regs, GET_MODE (reg), REGNO (reg)))
575 return decl;
577 walk_subtrees = 0;
579 else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
580 walk_subtrees = 0;
581 return NULL_TREE;
584 /* If there is an overlap between *REGS and DECL, return the first overlap
585 found. */
586 tree
587 tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
589 return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
592 /* Check for overlap between registers marked in CLOBBERED_REGS and
593 anything inappropriate in T. Emit error and return the register
594 variable definition for error, NULL_TREE for ok. */
596 static bool
597 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
599 /* Conflicts between asm-declared register variables and the clobber
600 list are not allowed. */
601 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
603 if (overlap)
605 error ("asm-specifier for variable %qE conflicts with asm clobber list",
606 DECL_NAME (overlap));
608 /* Reset registerness to stop multiple errors emitted for a single
609 variable. */
610 DECL_REGISTER (overlap) = 0;
611 return true;
614 return false;
617 /* Generate RTL for an asm statement with arguments.
618 STRING is the instruction template.
619 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
620 Each output or input has an expression in the TREE_VALUE and
621 a tree list in TREE_PURPOSE which in turn contains a constraint
622 name in TREE_VALUE (or NULL_TREE) and a constraint string
623 in TREE_PURPOSE.
624 CLOBBERS is a list of STRING_CST nodes each naming a hard register
625 that is clobbered by this insn.
627 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
628 Some elements of OUTPUTS may be replaced with trees representing temporary
629 values. The caller should copy those temporary values to the originally
630 specified lvalues.
632 VOL nonzero means the insn is volatile; don't optimize it. */
634 static void
635 expand_asm_operands (tree string, tree outputs, tree inputs,
636 tree clobbers, int vol, location_t locus)
638 rtvec argvec, constraintvec;
639 rtx body;
640 int ninputs = list_length (inputs);
641 int noutputs = list_length (outputs);
642 int ninout;
643 int nclobbers;
644 HARD_REG_SET clobbered_regs;
645 int clobber_conflict_found = 0;
646 tree tail;
647 tree t;
648 int i;
649 /* Vector of RTX's of evaluated output operands. */
650 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
651 int *inout_opnum = XALLOCAVEC (int, noutputs);
652 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
653 enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
654 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
655 int old_generating_concat_p = generating_concat_p;
657 /* An ASM with no outputs needs to be treated as volatile, for now. */
658 if (noutputs == 0)
659 vol = 1;
661 if (! check_operand_nalternatives (outputs, inputs))
662 return;
664 string = resolve_asm_operand_names (string, outputs, inputs);
666 /* Collect constraints. */
667 i = 0;
668 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
669 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
670 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
671 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
673 /* Sometimes we wish to automatically clobber registers across an asm.
674 Case in point is when the i386 backend moved from cc0 to a hard reg --
675 maintaining source-level compatibility means automatically clobbering
676 the flags register. */
677 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
679 /* Count the number of meaningful clobbered registers, ignoring what
680 we would ignore later. */
681 nclobbers = 0;
682 CLEAR_HARD_REG_SET (clobbered_regs);
683 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
685 const char *regname;
687 if (TREE_VALUE (tail) == error_mark_node)
688 return;
689 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
691 i = decode_reg_name (regname);
692 if (i >= 0 || i == -4)
693 ++nclobbers;
694 else if (i == -2)
695 error ("unknown register name %qs in %<asm%>", regname);
697 /* Mark clobbered registers. */
698 if (i >= 0)
700 /* Clobbering the PIC register is an error. */
701 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
703 error ("PIC register %qs clobbered in %<asm%>", regname);
704 return;
707 SET_HARD_REG_BIT (clobbered_regs, i);
711 /* First pass over inputs and outputs checks validity and sets
712 mark_addressable if needed. */
714 ninout = 0;
715 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
717 tree val = TREE_VALUE (tail);
718 tree type = TREE_TYPE (val);
719 const char *constraint;
720 bool is_inout;
721 bool allows_reg;
722 bool allows_mem;
724 /* If there's an erroneous arg, emit no insn. */
725 if (type == error_mark_node)
726 return;
728 /* Try to parse the output constraint. If that fails, there's
729 no point in going further. */
730 constraint = constraints[i];
731 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
732 &allows_mem, &allows_reg, &is_inout))
733 return;
735 if (! allows_reg
736 && (allows_mem
737 || is_inout
738 || (DECL_P (val)
739 && REG_P (DECL_RTL (val))
740 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
741 mark_addressable (val);
743 if (is_inout)
744 ninout++;
747 ninputs += ninout;
748 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
750 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
751 return;
754 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
756 bool allows_reg, allows_mem;
757 const char *constraint;
759 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
760 would get VOIDmode and that could cause a crash in reload. */
761 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
762 return;
764 constraint = constraints[i + noutputs];
765 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
766 constraints, &allows_mem, &allows_reg))
767 return;
769 if (! allows_reg && allows_mem)
770 mark_addressable (TREE_VALUE (tail));
773 /* Second pass evaluates arguments. */
775 ninout = 0;
776 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
778 tree val = TREE_VALUE (tail);
779 tree type = TREE_TYPE (val);
780 bool is_inout;
781 bool allows_reg;
782 bool allows_mem;
783 rtx op;
784 bool ok;
786 ok = parse_output_constraint (&constraints[i], i, ninputs,
787 noutputs, &allows_mem, &allows_reg,
788 &is_inout);
789 gcc_assert (ok);
791 /* If an output operand is not a decl or indirect ref and our constraint
792 allows a register, make a temporary to act as an intermediate.
793 Make the asm insn write into that, then our caller will copy it to
794 the real output operand. Likewise for promoted variables. */
796 generating_concat_p = 0;
798 real_output_rtx[i] = NULL_RTX;
799 if ((TREE_CODE (val) == INDIRECT_REF
800 && allows_mem)
801 || (DECL_P (val)
802 && (allows_mem || REG_P (DECL_RTL (val)))
803 && ! (REG_P (DECL_RTL (val))
804 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
805 || ! allows_reg
806 || is_inout)
808 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
809 if (MEM_P (op))
810 op = validize_mem (op);
812 if (! allows_reg && !MEM_P (op))
813 error ("output number %d not directly addressable", i);
814 if ((! allows_mem && MEM_P (op))
815 || GET_CODE (op) == CONCAT)
817 real_output_rtx[i] = op;
818 op = gen_reg_rtx (GET_MODE (op));
819 if (is_inout)
820 emit_move_insn (op, real_output_rtx[i]);
823 else
825 op = assign_temp (type, 0, 0, 1);
826 op = validize_mem (op);
827 TREE_VALUE (tail) = make_tree (type, op);
829 output_rtx[i] = op;
831 generating_concat_p = old_generating_concat_p;
833 if (is_inout)
835 inout_mode[ninout] = TYPE_MODE (type);
836 inout_opnum[ninout++] = i;
839 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
840 clobber_conflict_found = 1;
843 /* Make vectors for the expression-rtx, constraint strings,
844 and named operands. */
846 argvec = rtvec_alloc (ninputs);
847 constraintvec = rtvec_alloc (ninputs);
849 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
850 : GET_MODE (output_rtx[0])),
851 ggc_strdup (TREE_STRING_POINTER (string)),
852 empty_string, 0, argvec, constraintvec,
853 locus);
855 MEM_VOLATILE_P (body) = vol;
857 /* Eval the inputs and put them into ARGVEC.
858 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
860 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
862 bool allows_reg, allows_mem;
863 const char *constraint;
864 tree val, type;
865 rtx op;
866 bool ok;
868 constraint = constraints[i + noutputs];
869 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
870 constraints, &allows_mem, &allows_reg);
871 gcc_assert (ok);
873 generating_concat_p = 0;
875 val = TREE_VALUE (tail);
876 type = TREE_TYPE (val);
877 /* EXPAND_INITIALIZER will not generate code for valid initializer
878 constants, but will still generate code for other types of operand.
879 This is the behavior we want for constant constraints. */
880 op = expand_expr (val, NULL_RTX, VOIDmode,
881 allows_reg ? EXPAND_NORMAL
882 : allows_mem ? EXPAND_MEMORY
883 : EXPAND_INITIALIZER);
885 /* Never pass a CONCAT to an ASM. */
886 if (GET_CODE (op) == CONCAT)
887 op = force_reg (GET_MODE (op), op);
888 else if (MEM_P (op))
889 op = validize_mem (op);
891 if (asm_operand_ok (op, constraint, NULL) <= 0)
893 if (allows_reg && TYPE_MODE (type) != BLKmode)
894 op = force_reg (TYPE_MODE (type), op);
895 else if (!allows_mem)
896 warning (0, "asm operand %d probably doesn%'t match constraints",
897 i + noutputs);
898 else if (MEM_P (op))
900 /* We won't recognize either volatile memory or memory
901 with a queued address as available a memory_operand
902 at this point. Ignore it: clearly this *is* a memory. */
904 else
906 warning (0, "use of memory input without lvalue in "
907 "asm operand %d is deprecated", i + noutputs);
909 if (CONSTANT_P (op))
911 rtx mem = force_const_mem (TYPE_MODE (type), op);
912 if (mem)
913 op = validize_mem (mem);
914 else
915 op = force_reg (TYPE_MODE (type), op);
917 if (REG_P (op)
918 || GET_CODE (op) == SUBREG
919 || GET_CODE (op) == CONCAT)
921 tree qual_type = build_qualified_type (type,
922 (TYPE_QUALS (type)
923 | TYPE_QUAL_CONST));
924 rtx memloc = assign_temp (qual_type, 1, 1, 1);
925 memloc = validize_mem (memloc);
926 emit_move_insn (memloc, op);
927 op = memloc;
932 generating_concat_p = old_generating_concat_p;
933 ASM_OPERANDS_INPUT (body, i) = op;
935 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
936 = gen_rtx_ASM_INPUT (TYPE_MODE (type),
937 ggc_strdup (constraints[i + noutputs]));
939 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
940 clobber_conflict_found = 1;
943 /* Protect all the operands from the queue now that they have all been
944 evaluated. */
946 generating_concat_p = 0;
948 /* For in-out operands, copy output rtx to input rtx. */
949 for (i = 0; i < ninout; i++)
951 int j = inout_opnum[i];
952 char buffer[16];
954 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
955 = output_rtx[j];
957 sprintf (buffer, "%d", j);
958 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
959 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
962 generating_concat_p = old_generating_concat_p;
964 /* Now, for each output, construct an rtx
965 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
966 ARGVEC CONSTRAINTS OPNAMES))
967 If there is more than one, put them inside a PARALLEL. */
969 if (noutputs == 1 && nclobbers == 0)
971 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
972 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
975 else if (noutputs == 0 && nclobbers == 0)
977 /* No output operands: put in a raw ASM_OPERANDS rtx. */
978 emit_insn (body);
981 else
983 rtx obody = body;
984 int num = noutputs;
986 if (num == 0)
987 num = 1;
989 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
991 /* For each output operand, store a SET. */
992 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
994 XVECEXP (body, 0, i)
995 = gen_rtx_SET (VOIDmode,
996 output_rtx[i],
997 gen_rtx_ASM_OPERANDS
998 (GET_MODE (output_rtx[i]),
999 ggc_strdup (TREE_STRING_POINTER (string)),
1000 ggc_strdup (constraints[i]),
1001 i, argvec, constraintvec, locus));
1003 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1006 /* If there are no outputs (but there are some clobbers)
1007 store the bare ASM_OPERANDS into the PARALLEL. */
1009 if (i == 0)
1010 XVECEXP (body, 0, i++) = obody;
1012 /* Store (clobber REG) for each clobbered register specified. */
1014 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1016 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1017 int j = decode_reg_name (regname);
1018 rtx clobbered_reg;
1020 if (j < 0)
1022 if (j == -3) /* `cc', which is not a register */
1023 continue;
1025 if (j == -4) /* `memory', don't cache memory across asm */
1027 XVECEXP (body, 0, i++)
1028 = gen_rtx_CLOBBER (VOIDmode,
1029 gen_rtx_MEM
1030 (BLKmode,
1031 gen_rtx_SCRATCH (VOIDmode)));
1032 continue;
1035 /* Ignore unknown register, error already signaled. */
1036 continue;
1039 /* Use QImode since that's guaranteed to clobber just one reg. */
1040 clobbered_reg = gen_rtx_REG (QImode, j);
1042 /* Do sanity check for overlap between clobbers and respectively
1043 input and outputs that hasn't been handled. Such overlap
1044 should have been detected and reported above. */
1045 if (!clobber_conflict_found)
1047 int opno;
1049 /* We test the old body (obody) contents to avoid tripping
1050 over the under-construction body. */
1051 for (opno = 0; opno < noutputs; opno++)
1052 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1053 internal_error ("asm clobber conflict with output operand");
1055 for (opno = 0; opno < ninputs - ninout; opno++)
1056 if (reg_overlap_mentioned_p (clobbered_reg,
1057 ASM_OPERANDS_INPUT (obody, opno)))
1058 internal_error ("asm clobber conflict with input operand");
1061 XVECEXP (body, 0, i++)
1062 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1065 emit_insn (body);
1068 /* For any outputs that needed reloading into registers, spill them
1069 back to where they belong. */
1070 for (i = 0; i < noutputs; ++i)
1071 if (real_output_rtx[i])
1072 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1074 crtl->has_asm_statement = 1;
1075 free_temp_slots ();
1078 void
1079 expand_asm_stmt (gimple stmt)
1081 int noutputs;
1082 tree outputs, tail, t;
1083 tree *o;
1084 size_t i, n;
1085 const char *s;
1086 tree str, out, in, cl;
1088 /* Meh... convert the gimple asm operands into real tree lists.
1089 Eventually we should make all routines work on the vectors instead
1090 of relying on TREE_CHAIN. */
1091 out = NULL_TREE;
1092 n = gimple_asm_noutputs (stmt);
1093 if (n > 0)
1095 t = out = gimple_asm_output_op (stmt, 0);
1096 for (i = 1; i < n; i++)
1098 TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
1099 t = gimple_asm_output_op (stmt, i);
1103 in = NULL_TREE;
1104 n = gimple_asm_ninputs (stmt);
1105 if (n > 0)
1107 t = in = gimple_asm_input_op (stmt, 0);
1108 for (i = 1; i < n; i++)
1110 TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
1111 t = gimple_asm_input_op (stmt, i);
1115 cl = NULL_TREE;
1116 n = gimple_asm_nclobbers (stmt);
1117 if (n > 0)
1119 t = cl = gimple_asm_clobber_op (stmt, 0);
1120 for (i = 1; i < n; i++)
1122 TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
1123 t = gimple_asm_clobber_op (stmt, i);
1127 s = gimple_asm_string (stmt);
1128 str = build_string (strlen (s), s);
1130 if (gimple_asm_input_p (stmt))
1132 expand_asm_loc (str, gimple_asm_volatile_p (stmt), input_location);
1133 return;
1136 outputs = out;
1137 noutputs = gimple_asm_noutputs (stmt);
1138 /* o[I] is the place that output number I should be written. */
1139 o = (tree *) alloca (noutputs * sizeof (tree));
1141 /* Record the contents of OUTPUTS before it is modified. */
1142 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1143 o[i] = TREE_VALUE (tail);
1145 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1146 OUTPUTS some trees for where the values were actually stored. */
1147 expand_asm_operands (str, outputs, in, cl, gimple_asm_volatile_p (stmt),
1148 input_location);
1150 /* Copy all the intermediate outputs into the specified outputs. */
1151 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1153 if (o[i] != TREE_VALUE (tail))
1155 expand_assignment (o[i], TREE_VALUE (tail), false);
1156 free_temp_slots ();
1158 /* Restore the original value so that it's correct the next
1159 time we expand this function. */
1160 TREE_VALUE (tail) = o[i];
1165 /* A subroutine of expand_asm_operands. Check that all operands have
1166 the same number of alternatives. Return true if so. */
1168 static bool
1169 check_operand_nalternatives (tree outputs, tree inputs)
1171 if (outputs || inputs)
1173 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1174 int nalternatives
1175 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1176 tree next = inputs;
1178 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1180 error ("too many alternatives in %<asm%>");
1181 return false;
1184 tmp = outputs;
1185 while (tmp)
1187 const char *constraint
1188 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1190 if (n_occurrences (',', constraint) != nalternatives)
1192 error ("operand constraints for %<asm%> differ "
1193 "in number of alternatives");
1194 return false;
1197 if (TREE_CHAIN (tmp))
1198 tmp = TREE_CHAIN (tmp);
1199 else
1200 tmp = next, next = 0;
1204 return true;
1207 /* A subroutine of expand_asm_operands. Check that all operand names
1208 are unique. Return true if so. We rely on the fact that these names
1209 are identifiers, and so have been canonicalized by get_identifier,
1210 so all we need are pointer comparisons. */
1212 static bool
1213 check_unique_operand_names (tree outputs, tree inputs)
1215 tree i, j;
1217 for (i = outputs; i ; i = TREE_CHAIN (i))
1219 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1220 if (! i_name)
1221 continue;
1223 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1224 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1225 goto failure;
1228 for (i = inputs; i ; i = TREE_CHAIN (i))
1230 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1231 if (! i_name)
1232 continue;
1234 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1235 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1236 goto failure;
1237 for (j = outputs; j ; j = TREE_CHAIN (j))
1238 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1239 goto failure;
1242 return true;
1244 failure:
1245 error ("duplicate asm operand name %qs",
1246 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1247 return false;
1250 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1251 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1252 STRING and in the constraints to those numbers. */
1254 tree
1255 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1257 char *buffer;
1258 char *p;
1259 const char *c;
1260 tree t;
1262 check_unique_operand_names (outputs, inputs);
1264 /* Substitute [<name>] in input constraint strings. There should be no
1265 named operands in output constraints. */
1266 for (t = inputs; t ; t = TREE_CHAIN (t))
1268 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1269 if (strchr (c, '[') != NULL)
1271 p = buffer = xstrdup (c);
1272 while ((p = strchr (p, '[')) != NULL)
1273 p = resolve_operand_name_1 (p, outputs, inputs);
1274 TREE_VALUE (TREE_PURPOSE (t))
1275 = build_string (strlen (buffer), buffer);
1276 free (buffer);
1280 /* Now check for any needed substitutions in the template. */
1281 c = TREE_STRING_POINTER (string);
1282 while ((c = strchr (c, '%')) != NULL)
1284 if (c[1] == '[')
1285 break;
1286 else if (ISALPHA (c[1]) && c[2] == '[')
1287 break;
1288 else
1290 c += 1;
1291 continue;
1295 if (c)
1297 /* OK, we need to make a copy so we can perform the substitutions.
1298 Assume that we will not need extra space--we get to remove '['
1299 and ']', which means we cannot have a problem until we have more
1300 than 999 operands. */
1301 buffer = xstrdup (TREE_STRING_POINTER (string));
1302 p = buffer + (c - TREE_STRING_POINTER (string));
1304 while ((p = strchr (p, '%')) != NULL)
1306 if (p[1] == '[')
1307 p += 1;
1308 else if (ISALPHA (p[1]) && p[2] == '[')
1309 p += 2;
1310 else
1312 p += 1;
1313 continue;
1316 p = resolve_operand_name_1 (p, outputs, inputs);
1319 string = build_string (strlen (buffer), buffer);
1320 free (buffer);
1323 return string;
1326 /* A subroutine of resolve_operand_names. P points to the '[' for a
1327 potential named operand of the form [<name>]. In place, replace
1328 the name and brackets with a number. Return a pointer to the
1329 balance of the string after substitution. */
1331 static char *
1332 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
1334 char *q;
1335 int op;
1336 tree t;
1337 size_t len;
1339 /* Collect the operand name. */
1340 q = strchr (p, ']');
1341 if (!q)
1343 error ("missing close brace for named operand");
1344 return strchr (p, '\0');
1346 len = q - p - 1;
1348 /* Resolve the name to a number. */
1349 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1351 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1352 if (name)
1354 const char *c = TREE_STRING_POINTER (name);
1355 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1356 goto found;
1359 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1361 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1362 if (name)
1364 const char *c = TREE_STRING_POINTER (name);
1365 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1366 goto found;
1370 *q = '\0';
1371 error ("undefined named operand %qs", identifier_to_locale (p + 1));
1372 op = 0;
1373 found:
1375 /* Replace the name with the number. Unfortunately, not all libraries
1376 get the return value of sprintf correct, so search for the end of the
1377 generated string by hand. */
1378 sprintf (p, "%d", op);
1379 p = strchr (p, '\0');
1381 /* Verify the no extra buffer space assumption. */
1382 gcc_assert (p <= q);
1384 /* Shift the rest of the buffer down to fill the gap. */
1385 memmove (p, q + 1, strlen (q + 1) + 1);
1387 return p;
1390 /* Generate RTL to evaluate the expression EXP. */
1392 void
1393 expand_expr_stmt (tree exp)
1395 rtx value;
1396 tree type;
1398 value = expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
1399 type = TREE_TYPE (exp);
1401 /* If all we do is reference a volatile value in memory,
1402 copy it to a register to be sure it is actually touched. */
1403 if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
1405 if (TYPE_MODE (type) == VOIDmode)
1407 else if (TYPE_MODE (type) != BLKmode)
1408 value = copy_to_reg (value);
1409 else
1411 rtx lab = gen_label_rtx ();
1413 /* Compare the value with itself to reference it. */
1414 emit_cmp_and_jump_insns (value, value, EQ,
1415 expand_normal (TYPE_SIZE (type)),
1416 BLKmode, 0, lab);
1417 emit_label (lab);
1421 /* Free any temporaries used to evaluate this expression. */
1422 free_temp_slots ();
1425 /* Warn if EXP contains any computations whose results are not used.
1426 Return 1 if a warning is printed; 0 otherwise. LOCUS is the
1427 (potential) location of the expression. */
1430 warn_if_unused_value (const_tree exp, location_t locus)
1432 restart:
1433 if (TREE_USED (exp) || TREE_NO_WARNING (exp))
1434 return 0;
1436 /* Don't warn about void constructs. This includes casting to void,
1437 void function calls, and statement expressions with a final cast
1438 to void. */
1439 if (VOID_TYPE_P (TREE_TYPE (exp)))
1440 return 0;
1442 if (EXPR_HAS_LOCATION (exp))
1443 locus = EXPR_LOCATION (exp);
1445 switch (TREE_CODE (exp))
1447 case PREINCREMENT_EXPR:
1448 case POSTINCREMENT_EXPR:
1449 case PREDECREMENT_EXPR:
1450 case POSTDECREMENT_EXPR:
1451 case MODIFY_EXPR:
1452 case INIT_EXPR:
1453 case TARGET_EXPR:
1454 case CALL_EXPR:
1455 case TRY_CATCH_EXPR:
1456 case WITH_CLEANUP_EXPR:
1457 case EXIT_EXPR:
1458 case VA_ARG_EXPR:
1459 return 0;
1461 case BIND_EXPR:
1462 /* For a binding, warn if no side effect within it. */
1463 exp = BIND_EXPR_BODY (exp);
1464 goto restart;
1466 case SAVE_EXPR:
1467 case NON_LVALUE_EXPR:
1468 exp = TREE_OPERAND (exp, 0);
1469 goto restart;
1471 case TRUTH_ORIF_EXPR:
1472 case TRUTH_ANDIF_EXPR:
1473 /* In && or ||, warn if 2nd operand has no side effect. */
1474 exp = TREE_OPERAND (exp, 1);
1475 goto restart;
1477 case COMPOUND_EXPR:
1478 if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
1479 return 1;
1480 /* Let people do `(foo (), 0)' without a warning. */
1481 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1482 return 0;
1483 exp = TREE_OPERAND (exp, 1);
1484 goto restart;
1486 case COND_EXPR:
1487 /* If this is an expression with side effects, don't warn; this
1488 case commonly appears in macro expansions. */
1489 if (TREE_SIDE_EFFECTS (exp))
1490 return 0;
1491 goto warn;
1493 case INDIRECT_REF:
1494 /* Don't warn about automatic dereferencing of references, since
1495 the user cannot control it. */
1496 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1498 exp = TREE_OPERAND (exp, 0);
1499 goto restart;
1501 /* Fall through. */
1503 default:
1504 /* Referencing a volatile value is a side effect, so don't warn. */
1505 if ((DECL_P (exp) || REFERENCE_CLASS_P (exp))
1506 && TREE_THIS_VOLATILE (exp))
1507 return 0;
1509 /* If this is an expression which has no operands, there is no value
1510 to be unused. There are no such language-independent codes,
1511 but front ends may define such. */
1512 if (EXPRESSION_CLASS_P (exp) && TREE_OPERAND_LENGTH (exp) == 0)
1513 return 0;
1515 warn:
1516 warning_at (locus, OPT_Wunused_value, "value computed is not used");
1517 return 1;
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}
1536 /* Generate RTL to return directly from the current function.
1537 (That is, we bypass any return value.) */
1539 void
1540 expand_naked_return (void)
1542 rtx end_label;
1544 clear_pending_stack_adjust ();
1545 do_pending_stack_adjust ();
1547 end_label = naked_return_label;
1548 if (end_label == 0)
1549 end_label = naked_return_label = gen_label_rtx ();
1551 emit_jump (end_label);
1554 /* Generate RTL to return from the current function, with value VAL. */
1556 static void
1557 expand_value_return (rtx val)
1559 /* Copy the value to the return location unless it's already there. */
1561 tree decl = DECL_RESULT (current_function_decl);
1562 rtx return_reg = DECL_RTL (decl);
1563 if (return_reg != val)
1565 tree funtype = TREE_TYPE (current_function_decl);
1566 tree type = TREE_TYPE (decl);
1567 int unsignedp = TYPE_UNSIGNED (type);
1568 enum machine_mode old_mode = DECL_MODE (decl);
1569 enum machine_mode mode = promote_function_mode (type, old_mode,
1570 &unsignedp, funtype, 1);
1572 if (mode != old_mode)
1573 val = convert_modes (mode, old_mode, val, unsignedp);
1575 if (GET_CODE (return_reg) == PARALLEL)
1576 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1577 else
1578 emit_move_insn (return_reg, val);
1581 expand_null_return_1 ();
1584 /* Output a return with no value. */
1586 static void
1587 expand_null_return_1 (void)
1589 clear_pending_stack_adjust ();
1590 do_pending_stack_adjust ();
1591 emit_jump (return_label);
1594 /* Generate RTL to evaluate the expression RETVAL and return it
1595 from the current function. */
1597 void
1598 expand_return (tree retval)
1600 rtx result_rtl;
1601 rtx val = 0;
1602 tree retval_rhs;
1604 /* If function wants no value, give it none. */
1605 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
1607 expand_normal (retval);
1608 expand_null_return ();
1609 return;
1612 if (retval == error_mark_node)
1614 /* Treat this like a return of no value from a function that
1615 returns a value. */
1616 expand_null_return ();
1617 return;
1619 else if ((TREE_CODE (retval) == MODIFY_EXPR
1620 || TREE_CODE (retval) == INIT_EXPR)
1621 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
1622 retval_rhs = TREE_OPERAND (retval, 1);
1623 else
1624 retval_rhs = retval;
1626 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
1628 /* If we are returning the RESULT_DECL, then the value has already
1629 been stored into it, so we don't have to do anything special. */
1630 if (TREE_CODE (retval_rhs) == RESULT_DECL)
1631 expand_value_return (result_rtl);
1633 /* If the result is an aggregate that is being returned in one (or more)
1634 registers, load the registers here. The compiler currently can't handle
1635 copying a BLKmode value into registers. We could put this code in a
1636 more general area (for use by everyone instead of just function
1637 call/return), but until this feature is generally usable it is kept here
1638 (and in expand_call). */
1640 else if (retval_rhs != 0
1641 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
1642 && REG_P (result_rtl))
1644 int i;
1645 unsigned HOST_WIDE_INT bitpos, xbitpos;
1646 unsigned HOST_WIDE_INT padding_correction = 0;
1647 unsigned HOST_WIDE_INT bytes
1648 = int_size_in_bytes (TREE_TYPE (retval_rhs));
1649 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1650 unsigned int bitsize
1651 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
1652 rtx *result_pseudos = XALLOCAVEC (rtx, n_regs);
1653 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
1654 rtx result_val = expand_normal (retval_rhs);
1655 enum machine_mode tmpmode, result_reg_mode;
1657 if (bytes == 0)
1659 expand_null_return ();
1660 return;
1663 /* If the structure doesn't take up a whole number of words, see
1664 whether the register value should be padded on the left or on
1665 the right. Set PADDING_CORRECTION to the number of padding
1666 bits needed on the left side.
1668 In most ABIs, the structure will be returned at the least end of
1669 the register, which translates to right padding on little-endian
1670 targets and left padding on big-endian targets. The opposite
1671 holds if the structure is returned at the most significant
1672 end of the register. */
1673 if (bytes % UNITS_PER_WORD != 0
1674 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
1675 ? !BYTES_BIG_ENDIAN
1676 : BYTES_BIG_ENDIAN))
1677 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
1678 * BITS_PER_UNIT));
1680 /* Copy the structure BITSIZE bits at a time. */
1681 for (bitpos = 0, xbitpos = padding_correction;
1682 bitpos < bytes * BITS_PER_UNIT;
1683 bitpos += bitsize, xbitpos += bitsize)
1685 /* We need a new destination pseudo each time xbitpos is
1686 on a word boundary and when xbitpos == padding_correction
1687 (the first time through). */
1688 if (xbitpos % BITS_PER_WORD == 0
1689 || xbitpos == padding_correction)
1691 /* Generate an appropriate register. */
1692 dst = gen_reg_rtx (word_mode);
1693 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
1695 /* Clear the destination before we move anything into it. */
1696 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
1699 /* We need a new source operand each time bitpos is on a word
1700 boundary. */
1701 if (bitpos % BITS_PER_WORD == 0)
1702 src = operand_subword_force (result_val,
1703 bitpos / BITS_PER_WORD,
1704 BLKmode);
1706 /* Use bitpos for the source extraction (left justified) and
1707 xbitpos for the destination store (right justified). */
1708 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
1709 extract_bit_field (src, bitsize,
1710 bitpos % BITS_PER_WORD, 1,
1711 NULL_RTX, word_mode, word_mode));
1714 tmpmode = GET_MODE (result_rtl);
1715 if (tmpmode == BLKmode)
1717 /* Find the smallest integer mode large enough to hold the
1718 entire structure and use that mode instead of BLKmode
1719 on the USE insn for the return register. */
1720 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1721 tmpmode != VOIDmode;
1722 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1723 /* Have we found a large enough mode? */
1724 if (GET_MODE_SIZE (tmpmode) >= bytes)
1725 break;
1727 /* A suitable mode should have been found. */
1728 gcc_assert (tmpmode != VOIDmode);
1730 PUT_MODE (result_rtl, tmpmode);
1733 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
1734 result_reg_mode = word_mode;
1735 else
1736 result_reg_mode = tmpmode;
1737 result_reg = gen_reg_rtx (result_reg_mode);
1739 for (i = 0; i < n_regs; i++)
1740 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
1741 result_pseudos[i]);
1743 if (tmpmode != result_reg_mode)
1744 result_reg = gen_lowpart (tmpmode, result_reg);
1746 expand_value_return (result_reg);
1748 else if (retval_rhs != 0
1749 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
1750 && (REG_P (result_rtl)
1751 || (GET_CODE (result_rtl) == PARALLEL)))
1753 /* Calculate the return value into a temporary (usually a pseudo
1754 reg). */
1755 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
1756 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
1758 val = assign_temp (nt, 0, 0, 1);
1759 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
1760 val = force_not_mem (val);
1761 /* Return the calculated value. */
1762 expand_value_return (val);
1764 else
1766 /* No hard reg used; calculate value into hard return reg. */
1767 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
1768 expand_value_return (result_rtl);
1772 /* Emit code to restore vital registers at the beginning of a nonlocal goto
1773 handler. */
1774 static void
1775 expand_nl_goto_receiver (void)
1777 /* Clobber the FP when we get here, so we have to make sure it's
1778 marked as used by this function. */
1779 emit_use (hard_frame_pointer_rtx);
1781 /* Mark the static chain as clobbered here so life information
1782 doesn't get messed up for it. */
1783 emit_clobber (static_chain_rtx);
1785 #ifdef HAVE_nonlocal_goto
1786 if (! HAVE_nonlocal_goto)
1787 #endif
1788 /* First adjust our frame pointer to its actual value. It was
1789 previously set to the start of the virtual area corresponding to
1790 the stacked variables when we branched here and now needs to be
1791 adjusted to the actual hardware fp value.
1793 Assignments are to virtual registers are converted by
1794 instantiate_virtual_regs into the corresponding assignment
1795 to the underlying register (fp in this case) that makes
1796 the original assignment true.
1797 So the following insn will actually be
1798 decrementing fp by STARTING_FRAME_OFFSET. */
1799 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
1801 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1802 if (fixed_regs[ARG_POINTER_REGNUM])
1804 #ifdef ELIMINABLE_REGS
1805 /* If the argument pointer can be eliminated in favor of the
1806 frame pointer, we don't need to restore it. We assume here
1807 that if such an elimination is present, it can always be used.
1808 This is the case on all known machines; if we don't make this
1809 assumption, we do unnecessary saving on many machines. */
1810 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1811 size_t i;
1813 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1814 if (elim_regs[i].from == ARG_POINTER_REGNUM
1815 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1816 break;
1818 if (i == ARRAY_SIZE (elim_regs))
1819 #endif
1821 /* Now restore our arg pointer from the address at which it
1822 was saved in our stack frame. */
1823 emit_move_insn (crtl->args.internal_arg_pointer,
1824 copy_to_reg (get_arg_pointer_save_area ()));
1827 #endif
1829 #ifdef HAVE_nonlocal_goto_receiver
1830 if (HAVE_nonlocal_goto_receiver)
1831 emit_insn (gen_nonlocal_goto_receiver ());
1832 #endif
1834 /* We must not allow the code we just generated to be reordered by
1835 scheduling. Specifically, the update of the frame pointer must
1836 happen immediately, not later. */
1837 emit_insn (gen_blockage ());
1840 /* Generate RTL for the automatic variable declaration DECL.
1841 (Other kinds of declarations are simply ignored if seen here.) */
1843 void
1844 expand_decl (tree decl)
1846 tree type;
1848 type = TREE_TYPE (decl);
1850 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
1851 type in case this node is used in a reference. */
1852 if (TREE_CODE (decl) == CONST_DECL)
1854 DECL_MODE (decl) = TYPE_MODE (type);
1855 DECL_ALIGN (decl) = TYPE_ALIGN (type);
1856 DECL_SIZE (decl) = TYPE_SIZE (type);
1857 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
1858 return;
1861 /* Otherwise, only automatic variables need any expansion done. Static and
1862 external variables, and external functions, will be handled by
1863 `assemble_variable' (called from finish_decl). TYPE_DECL requires
1864 nothing. PARM_DECLs are handled in `assign_parms'. */
1865 if (TREE_CODE (decl) != VAR_DECL)
1866 return;
1868 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
1869 return;
1871 /* Create the RTL representation for the variable. */
1873 if (type == error_mark_node)
1874 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
1876 else if (DECL_SIZE (decl) == 0)
1878 /* Variable with incomplete type. */
1879 rtx x;
1880 if (DECL_INITIAL (decl) == 0)
1881 /* Error message was already done; now avoid a crash. */
1882 x = gen_rtx_MEM (BLKmode, const0_rtx);
1883 else
1884 /* An initializer is going to decide the size of this array.
1885 Until we know the size, represent its address with a reg. */
1886 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
1888 set_mem_attributes (x, decl, 1);
1889 SET_DECL_RTL (decl, x);
1891 else if (use_register_for_decl (decl))
1893 /* Automatic variable that can go in a register. */
1894 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1896 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
1898 /* Note if the object is a user variable. */
1899 if (!DECL_ARTIFICIAL (decl))
1900 mark_user_reg (DECL_RTL (decl));
1902 if (POINTER_TYPE_P (type))
1903 mark_reg_pointer (DECL_RTL (decl),
1904 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
1907 else
1909 rtx oldaddr = 0;
1910 rtx addr;
1911 rtx x;
1913 /* Variable-sized decls are dealt with in the gimplifier. */
1914 gcc_assert (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST);
1916 /* If we previously made RTL for this decl, it must be an array
1917 whose size was determined by the initializer.
1918 The old address was a register; set that register now
1919 to the proper address. */
1920 if (DECL_RTL_SET_P (decl))
1922 gcc_assert (MEM_P (DECL_RTL (decl)));
1923 gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0)));
1924 oldaddr = XEXP (DECL_RTL (decl), 0);
1927 /* Set alignment we actually gave this decl. */
1928 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
1929 : GET_MODE_BITSIZE (DECL_MODE (decl)));
1930 DECL_USER_ALIGN (decl) = 0;
1932 x = assign_temp (decl, 1, 1, 1);
1933 set_mem_attributes (x, decl, 1);
1934 SET_DECL_RTL (decl, x);
1936 if (oldaddr)
1938 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
1939 if (addr != oldaddr)
1940 emit_move_insn (oldaddr, addr);
1945 /* Emit code to save the current value of stack. */
1947 expand_stack_save (void)
1949 rtx ret = NULL_RTX;
1951 do_pending_stack_adjust ();
1952 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
1953 return ret;
1956 /* Emit code to restore the current value of stack. */
1957 void
1958 expand_stack_restore (tree var)
1960 rtx sa = expand_normal (var);
1962 sa = convert_memory_address (Pmode, sa);
1963 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
1966 /* Do the insertion of a case label into case_list. The labels are
1967 fed to us in descending order from the sorted vector of case labels used
1968 in the tree part of the middle end. So the list we construct is
1969 sorted in ascending order. The bounds on the case range, LOW and HIGH,
1970 are converted to case's index type TYPE. */
1972 static struct case_node *
1973 add_case_node (struct case_node *head, tree type, tree low, tree high,
1974 tree label, alloc_pool case_node_pool)
1976 tree min_value, max_value;
1977 struct case_node *r;
1979 gcc_assert (TREE_CODE (low) == INTEGER_CST);
1980 gcc_assert (!high || TREE_CODE (high) == INTEGER_CST);
1982 min_value = TYPE_MIN_VALUE (type);
1983 max_value = TYPE_MAX_VALUE (type);
1985 /* If there's no HIGH value, then this is not a case range; it's
1986 just a simple case label. But that's just a degenerate case
1987 range.
1988 If the bounds are equal, turn this into the one-value case. */
1989 if (!high || tree_int_cst_equal (low, high))
1991 /* If the simple case value is unreachable, ignore it. */
1992 if ((TREE_CODE (min_value) == INTEGER_CST
1993 && tree_int_cst_compare (low, min_value) < 0)
1994 || (TREE_CODE (max_value) == INTEGER_CST
1995 && tree_int_cst_compare (low, max_value) > 0))
1996 return head;
1997 low = fold_convert (type, low);
1998 high = low;
2000 else
2002 /* If the entire case range is unreachable, ignore it. */
2003 if ((TREE_CODE (min_value) == INTEGER_CST
2004 && tree_int_cst_compare (high, min_value) < 0)
2005 || (TREE_CODE (max_value) == INTEGER_CST
2006 && tree_int_cst_compare (low, max_value) > 0))
2007 return head;
2009 /* If the lower bound is less than the index type's minimum
2010 value, truncate the range bounds. */
2011 if (TREE_CODE (min_value) == INTEGER_CST
2012 && tree_int_cst_compare (low, min_value) < 0)
2013 low = min_value;
2014 low = fold_convert (type, low);
2016 /* If the upper bound is greater than the index type's maximum
2017 value, truncate the range bounds. */
2018 if (TREE_CODE (max_value) == INTEGER_CST
2019 && tree_int_cst_compare (high, max_value) > 0)
2020 high = max_value;
2021 high = fold_convert (type, high);
2025 /* Add this label to the chain. Make sure to drop overflow flags. */
2026 r = (struct case_node *) pool_alloc (case_node_pool);
2027 r->low = build_int_cst_wide (TREE_TYPE (low), TREE_INT_CST_LOW (low),
2028 TREE_INT_CST_HIGH (low));
2029 r->high = build_int_cst_wide (TREE_TYPE (high), TREE_INT_CST_LOW (high),
2030 TREE_INT_CST_HIGH (high));
2031 r->code_label = label;
2032 r->parent = r->left = NULL;
2033 r->right = head;
2034 return r;
2037 /* Maximum number of case bit tests. */
2038 #define MAX_CASE_BIT_TESTS 3
2040 /* By default, enable case bit tests on targets with ashlsi3. */
2041 #ifndef CASE_USE_BIT_TESTS
2042 #define CASE_USE_BIT_TESTS (optab_handler (ashl_optab, word_mode)->insn_code \
2043 != CODE_FOR_nothing)
2044 #endif
2047 /* A case_bit_test represents a set of case nodes that may be
2048 selected from using a bit-wise comparison. HI and LO hold
2049 the integer to be tested against, LABEL contains the label
2050 to jump to upon success and BITS counts the number of case
2051 nodes handled by this test, typically the number of bits
2052 set in HI:LO. */
2054 struct case_bit_test
2056 HOST_WIDE_INT hi;
2057 HOST_WIDE_INT lo;
2058 rtx label;
2059 int bits;
2062 /* Determine whether "1 << x" is relatively cheap in word_mode. */
2064 static
2065 bool lshift_cheap_p (void)
2067 static bool init = false;
2068 static bool cheap = true;
2070 if (!init)
2072 rtx reg = gen_rtx_REG (word_mode, 10000);
2073 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET,
2074 optimize_insn_for_speed_p ());
2075 cheap = cost < COSTS_N_INSNS (3);
2076 init = true;
2079 return cheap;
2082 /* Comparison function for qsort to order bit tests by decreasing
2083 number of case nodes, i.e. the node with the most cases gets
2084 tested first. */
2086 static int
2087 case_bit_test_cmp (const void *p1, const void *p2)
2089 const struct case_bit_test *const d1 = (const struct case_bit_test *) p1;
2090 const struct case_bit_test *const d2 = (const struct case_bit_test *) p2;
2092 if (d2->bits != d1->bits)
2093 return d2->bits - d1->bits;
2095 /* Stabilize the sort. */
2096 return CODE_LABEL_NUMBER (d2->label) - CODE_LABEL_NUMBER (d1->label);
2099 /* Expand a switch statement by a short sequence of bit-wise
2100 comparisons. "switch(x)" is effectively converted into
2101 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
2102 integer constants.
2104 INDEX_EXPR is the value being switched on, which is of
2105 type INDEX_TYPE. MINVAL is the lowest case value of in
2106 the case nodes, of INDEX_TYPE type, and RANGE is highest
2107 value minus MINVAL, also of type INDEX_TYPE. NODES is
2108 the set of case nodes, and DEFAULT_LABEL is the label to
2109 branch to should none of the cases match.
2111 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
2112 node targets. */
2114 static void
2115 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
2116 tree range, case_node_ptr nodes, rtx default_label)
2118 struct case_bit_test test[MAX_CASE_BIT_TESTS];
2119 enum machine_mode mode;
2120 rtx expr, index, label;
2121 unsigned int i,j,lo,hi;
2122 struct case_node *n;
2123 unsigned int count;
2125 count = 0;
2126 for (n = nodes; n; n = n->right)
2128 label = label_rtx (n->code_label);
2129 for (i = 0; i < count; i++)
2130 if (label == test[i].label)
2131 break;
2133 if (i == count)
2135 gcc_assert (count < MAX_CASE_BIT_TESTS);
2136 test[i].hi = 0;
2137 test[i].lo = 0;
2138 test[i].label = label;
2139 test[i].bits = 1;
2140 count++;
2142 else
2143 test[i].bits++;
2145 lo = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2146 n->low, minval), 1);
2147 hi = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2148 n->high, minval), 1);
2149 for (j = lo; j <= hi; j++)
2150 if (j >= HOST_BITS_PER_WIDE_INT)
2151 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
2152 else
2153 test[i].lo |= (HOST_WIDE_INT) 1 << j;
2156 qsort (test, count, sizeof(*test), case_bit_test_cmp);
2158 index_expr = fold_build2 (MINUS_EXPR, index_type,
2159 fold_convert (index_type, index_expr),
2160 fold_convert (index_type, minval));
2161 index = expand_normal (index_expr);
2162 do_pending_stack_adjust ();
2164 mode = TYPE_MODE (index_type);
2165 expr = expand_normal (range);
2166 if (default_label)
2167 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
2168 default_label);
2170 index = convert_to_mode (word_mode, index, 0);
2171 index = expand_binop (word_mode, ashl_optab, const1_rtx,
2172 index, NULL_RTX, 1, OPTAB_WIDEN);
2174 for (i = 0; i < count; i++)
2176 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
2177 expr = expand_binop (word_mode, and_optab, index, expr,
2178 NULL_RTX, 1, OPTAB_WIDEN);
2179 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
2180 word_mode, 1, test[i].label);
2183 if (default_label)
2184 emit_jump (default_label);
2187 #ifndef HAVE_casesi
2188 #define HAVE_casesi 0
2189 #endif
2191 #ifndef HAVE_tablejump
2192 #define HAVE_tablejump 0
2193 #endif
2195 /* Terminate a case (Pascal/Ada) or switch (C) statement
2196 in which ORIG_INDEX is the expression to be tested.
2197 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
2198 type as given in the source before any compiler conversions.
2199 Generate the code to test it and jump to the right place. */
void
expand_case (gimple stmt)
{
  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
  rtx default_label = 0;
  struct case_node *n;
  unsigned int count, uniq;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  int i;
  rtx before_case, end, lab;

  tree index_expr = gimple_switch_index (stmt);
  tree index_type = TREE_TYPE (index_expr);
  int unsignedp = TYPE_UNSIGNED (index_type);

  /* The insn after which the case dispatch should finally
     be emitted.  Zero for a dummy.  */
  rtx start;

  /* A list of case labels; it is first built as a list and it may then
     be rearranged into a nearly balanced binary tree.  */
  struct case_node *case_list = 0;

  /* Label to jump to if no case matches.  */
  tree default_label_decl = NULL_TREE;

  /* Pool holding every case_node built below; freed on all exits from
     this function.  */
  alloc_pool case_node_pool = create_alloc_pool ("struct case_node pool",
						 sizeof (struct case_node),
						 100);

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (index_type != error_mark_node)
    {
      tree elt;
      bitmap label_bitmap;
      int stopi = 0;

      /* cleanup_tree_cfg removes all SWITCH_EXPR with their index
	 expressions being INTEGER_CST.  */
      gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);

      /* The default case, if ever taken, is the first element.  */
      elt = gimple_switch_label (stmt, 0);
      if (!CASE_LOW (elt) && !CASE_HIGH (elt))
	{
	  default_label_decl = CASE_LABEL (elt);
	  stopi = 1;
	}

      /* Walk the labels backwards so the resulting CASE_LIST ends up in
	 ascending order (gimple_switch_label returns them sorted).  */
      for (i = gimple_switch_num_labels (stmt) - 1; i >= stopi; --i)
	{
	  tree low, high;
	  elt = gimple_switch_label (stmt, i);

	  low = CASE_LOW (elt);
	  gcc_assert (low);
	  high = CASE_HIGH (elt);

	  /* Discard empty ranges.  */
	  if (high && tree_int_cst_lt (high, low))
	    continue;

	  case_list = add_case_node (case_list, index_type, low, high,
				     CASE_LABEL (elt), case_node_pool);
	}

      before_case = start = get_last_insn ();
      if (default_label_decl)
	default_label = label_rtx (default_label_decl);

      /* Get upper and lower bounds of case values.  Also count the
	 comparisons needed (COUNT, ranges count double) and the number
	 of distinct jump targets (UNIQ).  */

      uniq = 0;
      count = 0;
      label_bitmap = BITMAP_ALLOC (NULL);
      for (n = case_list; n; n = n->right)
	{
	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */
	  if (count++ == 0)
	    {
	      minval = n->low;
	      maxval = n->high;
	    }
	  else
	    {
	      if (tree_int_cst_lt (n->low, minval))
		minval = n->low;
	      if (tree_int_cst_lt (maxval, n->high))
		maxval = n->high;
	    }
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;

	  /* If we have not seen this label yet, then increase the
	     number of unique case node targets seen.  */
	  lab = label_rtx (n->code_label);
	  if (!bitmap_bit_p (label_bitmap, CODE_LABEL_NUMBER (lab)))
	    {
	      bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab));
	      uniq++;
	    }
	}

      BITMAP_FREE (label_bitmap);

      /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
	 destination, such as one with a default case only.  However,
	 it doesn't remove cases that are out of range for the switch
	 type, so we may still get a zero here.  */
      if (count == 0)
	{
	  if (default_label)
	    emit_jump (default_label);
	  free_alloc_pool (case_node_pool);
	  return;
	}

      /* Compute span of values.  */
      range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);

      /* Try implementing this switch statement by a short sequence of
	 bit-wise comparisons.  However, we let the binary-tree case
	 below handle constant index expressions.  */
      if (CASE_USE_BIT_TESTS
	  && ! TREE_CONSTANT (index_expr)
	  && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
	  && compare_tree_int (range, 0) > 0
	  && lshift_cheap_p ()
	  && ((uniq == 1 && count >= 3)
	      || (uniq == 2 && count >= 5)
	      || (uniq == 3 && count >= 6)))
	{
	  /* Optimize the case where all the case values fit in a
	     word without having to subtract MINVAL.  In this case,
	     we can optimize away the subtraction.  */
	  if (compare_tree_int (minval, 0) > 0
	      && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
	    {
	      minval = build_int_cst (index_type, 0);
	      range = maxval;
	    }
	  emit_case_bit_tests (index_type, index_expr, minval, range,
			       case_list, default_label);
	}

      /* If range of values is much bigger than number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */

      else if (count < targetm.case_values_threshold ()
	       || compare_tree_int (range,
				    (optimize_insn_for_size_p () ? 3 : 10) * count) > 0
	       /* RANGE may be signed, and really large ranges will show up
		  as negative numbers.  */
	       || compare_tree_int (range, 0) < 0
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
	       || flag_pic
#endif
	       || !flag_jump_tables
	       || TREE_CONSTANT (index_expr)
	       /* If neither casesi or tablejump is available, we can
		  only go this way.  */
	       || (!HAVE_casesi && !HAVE_tablejump))
	{
	  index = expand_normal (index_expr);

	  /* If the index is a short or char that we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && ! have_insn_for (COMPARE, GET_MODE (index)))
	    {
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (have_insn_for (COMPARE, wider_mode))
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  do_pending_stack_adjust ();

	  if (MEM_P (index))
	    index = copy_to_reg (index);

	  /* We generate a binary decision tree to select the
	     appropriate target code.  This is done as follows:

	     The list of cases is rearranged into a binary tree,
	     nearly optimal assuming equal probability for each case.

	     The tree is transformed into RTL, eliminating
	     redundant test conditions at the same time.

	     If program flow could reach the end of the
	     decision tree an unconditional jump to the
	     default code is emitted.  */

	  use_cost_table = estimate_case_costs (case_list);
	  balance_case_nodes (&case_list, NULL);
	  emit_case_nodes (index, case_list, default_label, index_type);
	  if (default_label)
	    emit_jump (default_label);
	}
      else
	{
	  /* Dispatch via a jump table, through casesi if the target has
	     it, otherwise through tablejump.  */
	  rtx fallback_label = label_rtx (case_list->code_label);
	  table_label = gen_label_rtx ();
	  if (! try_casesi (index_type, index_expr, minval, range,
			    table_label, default_label, fallback_label))
	    {
	      bool ok;

	      /* Index jumptables from zero for suitable values of
		 minval to avoid a subtraction.  */
	      if (optimize_insn_for_speed_p ()
		  && compare_tree_int (minval, 0) > 0
		  && compare_tree_int (minval, 3) < 0)
		{
		  minval = build_int_cst (index_type, 0);
		  range = maxval;
		}

	      ok = try_tablejump (index_type, index_expr, minval, range,
				  table_label, default_label);
	      gcc_assert (ok);
	    }

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = tree_low_cst (range, 0) + 1;
	  labelvec = XALLOCAVEC (rtx, ncases);
	  memset (labelvec, 0, ncases * sizeof (rtx));

	  for (n = case_list; n; n = n->right)
	    {
	      /* Compute the low and high bounds relative to the minimum
		 value since that should fit in a HOST_WIDE_INT while the
		 actual values may not.  */
	      HOST_WIDE_INT i_low
		= tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
					     n->low, minval), 1);
	      HOST_WIDE_INT i_high
		= tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
					     n->high, minval), 1);
	      HOST_WIDE_INT i;

	      for (i = i_low; i <= i_high; i ++)
		labelvec[i]
		  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
	    }

	  /* Fill in the gaps with the default.  We may have gaps at
	     the beginning if we tried to avoid the minval subtraction,
	     so substitute some label even if the default label was
	     deemed unreachable.  */
	  if (!default_label)
	    default_label = fallback_label;
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);

	  /* Output the table.  */
	  emit_label (table_label);

	  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
	    emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
						   gen_rtx_LABEL_REF (Pmode, table_label),
						   gen_rtvec_v (ncases, labelvec),
						   const0_rtx, const0_rtx));
	  else
	    emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
					      gen_rtvec_v (ncases, labelvec)));

	  /* Record no drop-through after the table.  */
	  emit_barrier ();
	}

      /* Move the whole dispatch sequence just emitted to directly after
	 START (the insn stream position captured above).  */
      before_case = NEXT_INSN (before_case);
      end = get_last_insn ();
      reorder_insns (before_case, end, start);
    }

  free_temp_slots ();
  free_alloc_pool (case_node_pool);
}
2501 /* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE. */
static void
do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
		  int unsignedp)
{
  /* Thin wrapper around do_compare_rtx_and_jump for an EQ test; the
     two NULL_RTX arguments request no size/alignment hints.  */
  do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
			   NULL_RTX, NULL_RTX, label);
}
2511 /* Not all case values are encountered equally. This function
2512 uses a heuristic to weight case labels, in cases where that
2513 looks like a reasonable thing to do.
2515 Right now, all we try to guess is text, and we establish the
2516 following weights:
2518 chars above space: 16
2519 digits: 16
2520 default: 12
2521 space, punct: 8
2522 tab: 4
2523 newline: 2
2524 other "\" chars: 1
2525 remaining chars: 0
2527 If we find any cases in the switch that are not either -1 or in the range
2528 of valid ASCII characters, or are control characters other than those
2529 commonly used with "\", don't treat this switch scanning text.
2531 Return 1 if these nodes are suitable for cost estimation, otherwise
2532 return 0. */
static int
estimate_case_costs (case_node_ptr node)
{
  tree min_ascii = integer_minus_one_node;
  tree max_ascii = build_int_cst (TREE_TYPE (node->high), 127);
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (! cost_table_initialized)
    {
      cost_table_initialized = 1;

      /* Per the weights documented above: alphanumerics 16, punctuation 8,
	 control characters -1 (poison) unless overridden below.  */
      for (i = 0; i < 128; i++)
	{
	  if (ISALNUM (i))
	    COST_TABLE (i) = 16;
	  else if (ISPUNCT (i))
	    COST_TABLE (i) = 8;
	  else if (ISCNTRL (i))
	    COST_TABLE (i) = -1;
	}

      /* Common whitespace/escape characters get explicit weights.  */
      COST_TABLE (' ') = 8;
      COST_TABLE ('\t') = 4;
      COST_TABLE ('\0') = 4;
      COST_TABLE ('\n') = 2;
      COST_TABLE ('\f') = 1;
      COST_TABLE ('\v') = 1;
      COST_TABLE ('\b') = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if (tree_int_cst_lt (n->low, min_ascii)
	  || tree_int_cst_lt (max_ascii, n->high))
	return 0;

      /* A negative table entry marks an uncommon control character;
	 any such value disqualifies the whole switch.  */
      for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
	   i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
	if (COST_TABLE (i) < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}
2591 /* Take an ordered list of case nodes
2592 and transform them into a near optimal binary tree,
2593 on the assumption that any target code selection value is as
2594 likely as any other.
2596 The transformation is performed by splitting the ordered
2597 list into two equal sections plus a pivot. The parts are
2598 then attached to the pivot as left and right branches. Each
2599 branch is then transformed recursively. */
static void
balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
	    }

	  if (use_cost_table)
	    cost += COST_TABLE (TREE_INT_CST_LOW (np->low));

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
		  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }

	  /* *NPP is the pivot: it becomes the new head, everything before
	     it becomes the left subtree, everything after the right.  */
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
2708 /* Search the parent sections of the case node tree
2709 to see if a test for the lower bound of NODE would be redundant.
2710 INDEX_TYPE is the type of the index expression.
2712 The instructions to generate the case decision tree are
2713 output in the same order as nodes are processed so it is
2714 known that if a parent node checks the range of the current
2715 node minus one that the current node is bounded at its lower
2716 span. Thus the test would be redundant. */
2718 static int
2719 node_has_low_bound (case_node_ptr node, tree index_type)
2721 tree low_minus_one;
2722 case_node_ptr pnode;
2724 /* If the lower bound of this node is the lowest value in the index type,
2725 we need not test it. */
2727 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
2728 return 1;
2730 /* If this node has a left branch, the value at the left must be less
2731 than that at this node, so it cannot be bounded at the bottom and
2732 we need not bother testing any further. */
2734 if (node->left)
2735 return 0;
2737 low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
2738 node->low,
2739 build_int_cst (TREE_TYPE (node->low), 1));
2741 /* If the subtraction above overflowed, we can't verify anything.
2742 Otherwise, look for a parent that tests our value - 1. */
2744 if (! tree_int_cst_lt (low_minus_one, node->low))
2745 return 0;
2747 for (pnode = node->parent; pnode; pnode = pnode->parent)
2748 if (tree_int_cst_equal (low_minus_one, pnode->high))
2749 return 1;
2751 return 0;
2754 /* Search the parent sections of the case node tree
2755 to see if a test for the upper bound of NODE would be redundant.
2756 INDEX_TYPE is the type of the index expression.
2758 The instructions to generate the case decision tree are
2759 output in the same order as nodes are processed so it is
2760 known that if a parent node checks the range of the current
2761 node plus one that the current node is bounded at its upper
2762 span. Thus the test would be redundant. */
2764 static int
2765 node_has_high_bound (case_node_ptr node, tree index_type)
2767 tree high_plus_one;
2768 case_node_ptr pnode;
2770 /* If there is no upper bound, obviously no test is needed. */
2772 if (TYPE_MAX_VALUE (index_type) == NULL)
2773 return 1;
2775 /* If the upper bound of this node is the highest value in the type
2776 of the index expression, we need not test against it. */
2778 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
2779 return 1;
2781 /* If this node has a right branch, the value at the right must be greater
2782 than that at this node, so it cannot be bounded at the top and
2783 we need not bother testing any further. */
2785 if (node->right)
2786 return 0;
2788 high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
2789 node->high,
2790 build_int_cst (TREE_TYPE (node->high), 1));
2792 /* If the addition above overflowed, we can't verify anything.
2793 Otherwise, look for a parent that tests our value + 1. */
2795 if (! tree_int_cst_lt (node->high, high_plus_one))
2796 return 0;
2798 for (pnode = node->parent; pnode; pnode = pnode->parent)
2799 if (tree_int_cst_equal (high_plus_one, pnode->low))
2800 return 1;
2802 return 0;
2805 /* Search the parent sections of the
2806 case node tree to see if both tests for the upper and lower
2807 bounds of NODE would be redundant. */
2809 static int
2810 node_is_bounded (case_node_ptr node, tree index_type)
2812 return (node_has_low_bound (node, index_type)
2813 && node_has_high_bound (node, index_type));
2816 /* Emit step-by-step code to select a case for the value of INDEX.
2817 The thus generated decision tree follows the form of the
2818 case-node binary tree NODE, whose nodes represent test conditions.
2819 INDEX_TYPE is the type of the index of the switch.
2821 Care is taken to prune redundant tests from the decision tree
2822 by detecting any boundary conditions already checked by
2823 emitted rtx. (See node_has_high_bound, node_has_low_bound
2824 and node_is_bounded, above.)
2826 Where the test conditions can be shown to be redundant we emit
2827 an unconditional jump to the target code. As a further
2828 optimization, the subordinates of a tree node are examined to
2829 check for bounded nodes. In this case conditional and/or
2830 unconditional jumps as a result of the boundary check for the
2831 current node are arranged to target the subordinates associated
2832 code for out of bound conditions on the current node.
2834 We can assume that when control reaches the code generated here,
2835 the index value has already been compared with the parents
2836 of this node, and determined to be on the same side of each parent
2837 as this node is. Thus, if this node tests for the value 51,
2838 and a parent tested for 52, we don't need to consider
2839 the possibility of a value greater than 51. If another parent
2840 tests for the value 50, then this node need not test anything. */
static void
emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
		 tree index_type)
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TYPE_UNSIGNED (index_type);
  enum machine_mode mode = GET_MODE (index);
  enum machine_mode imode = TYPE_MODE (index_type);

  /* Handle indices detected as constant during RTL expansion.  */
  if (mode == VOIDmode)
    mode = imode;

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (mode, index,
			convert_modes (mode, imode,
				       expand_normal (node->low),
				       unsignedp),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->right->code_label));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->left->code_label));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  /* If both children are single-valued cases with no
	     children, finish up all the work.  This way, we can save
	     one ordered comparison.  */
	  else if (tree_int_cst_equal (node->right->low, node->right->high)
		   && node->right->left == 0
		   && node->right->right == 0
		   && tree_int_cst_equal (node->left->low, node->left->high)
		   && node->left->left == 0
		   && node->left->right == 0)
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      /* See if the value matches what the right hand side
		 wants.  */
	      do_jump_if_equal (mode, index,
				convert_modes (mode, imode,
					       expand_normal (node->right->low),
					       unsignedp),
				label_rtx (node->right->code_label),
				unsignedp);

	      /* See if the value matches what the left hand side
		 wants.  */
	      do_jump_if_equal (mode, index,
				convert_modes (mode, imode,
					       expand_normal (node->left->low),
					       unsignedp),
				label_rtx (node->left->code_label),
				unsignedp);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label
		= build_decl (CURR_INSN_LOCATION,
			      LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      if (default_label)
		emit_jump (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue a conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if the right child
	     does not have any children and is single valued; it would
	     cost too much space to save so little time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_normal (node->high),
					    unsignedp),
					   LT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (mode, index,
			      convert_modes
			      (mode, imode,
			       expand_normal (node->right->low),
			       unsignedp),
			      label_rtx (node->right->code_label), unsignedp);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  Mirror image of the
	     right-child-only case above.  */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_normal (node->high),
					    unsignedp),
					   GT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (mode, index,
			      convert_modes
			      (mode, imode,
			       expand_normal (node->left->low),
			       unsignedp),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_cmp_and_jump_insns (index,
				     convert_modes
				     (mode, imode,
				      expand_normal (node->high),
				      unsignedp),
				     GT, NULL_RTX, mode, unsignedp,
				     label_rtx (node->right->code_label));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (CURR_INSN_LOCATION,
				       LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->low),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      if (default_label)
		emit_jump (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->low),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->high),
				    unsignedp),
				   LE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->low),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */
	  int high_bound = node_has_high_bound (node, index_type);
	  int low_bound = node_has_low_bound (node, index_type);

	  if (!high_bound && low_bound)
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  else if (!low_bound && high_bound)
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->low),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }
	  else if (!low_bound && !high_bound)
	    {
	      /* Widen LOW and HIGH to the same width as INDEX.  */
	      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	      tree low = build1 (CONVERT_EXPR, type, node->low);
	      tree high = build1 (CONVERT_EXPR, type, node->high);
	      rtx low_rtx, new_index, new_bound;

	      /* Instead of doing two branches, emit one unsigned branch for
		 (index-low) > (high-low).  */
	      low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
	      new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
					       NULL_RTX, unsignedp,
					       OPTAB_WIDEN);
	      new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
						    high, low),
				       NULL_RTX, mode, EXPAND_NORMAL);

	      emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
				       mode, 1, default_label);
	    }

	  /* Index is known to be in this node's range: jump to its code.  */
	  emit_jump (label_rtx (node->code_label));
	}
    }
}