1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4 2010 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 The functions whose names start with `expand_' are called by the
25 expander to generate RTL instructions for various kinds of constructs. */
29 #include "coretypes.h"
33 #include "hard-reg-set.h"
39 #include "insn-config.h"
47 #include "langhooks.h"
53 #include "alloc-pool.h"
54 #include "pretty-print.h"
56 /* Functions and data structures for expanding case statements. */
58 /* Case label structure, used to hold info on labels within case
59 statements. We handle "range" labels; for a single-value label
60 as in C, the high and low limits are the same.
62 We start with a vector of case nodes sorted in ascending order, and
63 the default label as the last element in the vector. Before expanding
64 to RTL, we transform this vector into a list linked via the RIGHT
65 fields in the case_node struct. Nodes with higher case values are
68 Switch statements can be output in three forms. A branch table is
69 used if there are more than a few labels and the labels are dense
70 within the range between the smallest and largest case value. If a
71 branch table is used, no further manipulations are done with the case
74 The alternative to the use of a branch table is to generate a series
75 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
76 and PARENT fields to hold a binary tree. Initially the tree is
77 totally unbalanced, with everything on the right. We balance the tree
78 with nodes on the left having lower case values than the parent
79 and nodes on the right having higher values. We then output the tree
82 For very small, suitable switch statements, we can generate a series
83 of simple bit test and branches instead. */
87 struct case_node
*left
; /* Left son in binary tree */
88 struct case_node
*right
; /* Right son in binary tree; also node chain */
89 struct case_node
*parent
; /* Parent of node in binary tree */
90 tree low
; /* Lowest index value for this label */
91 tree high
; /* Highest index value for this label */
92 tree code_label
; /* Label to jump to when node matches */
95 typedef struct case_node case_node
;
96 typedef struct case_node
*case_node_ptr
;
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
109 static int n_occurrences (int, const char *);
110 static bool tree_conflicts_with_clobbers_p (tree
, HARD_REG_SET
*);
111 static void expand_nl_goto_receiver (void);
112 static bool check_operand_nalternatives (tree
, tree
);
113 static bool check_unique_operand_names (tree
, tree
, tree
);
114 static char *resolve_operand_name_1 (char *, tree
, tree
, tree
);
115 static void expand_null_return_1 (void);
116 static void expand_value_return (rtx
);
117 static int estimate_case_costs (case_node_ptr
);
118 static bool lshift_cheap_p (void);
119 static int case_bit_test_cmp (const void *, const void *);
120 static void emit_case_bit_tests (tree
, tree
, tree
, tree
, case_node_ptr
, rtx
);
121 static void balance_case_nodes (case_node_ptr
*, case_node_ptr
);
122 static int node_has_low_bound (case_node_ptr
, tree
);
123 static int node_has_high_bound (case_node_ptr
, tree
);
124 static int node_is_bounded (case_node_ptr
, tree
);
125 static void emit_case_nodes (rtx
, case_node_ptr
, rtx
, tree
);
126 static struct case_node
*add_case_node (struct case_node
*, tree
,
127 tree
, tree
, tree
, alloc_pool
);
130 /* Return the rtx-label that corresponds to a LABEL_DECL,
131 creating it if necessary. */
134 label_rtx (tree label
)
136 gcc_assert (TREE_CODE (label
) == LABEL_DECL
);
138 if (!DECL_RTL_SET_P (label
))
140 rtx r
= gen_label_rtx ();
141 SET_DECL_RTL (label
, r
);
142 if (FORCED_LABEL (label
) || DECL_NONLOCAL (label
))
143 LABEL_PRESERVE_P (r
) = 1;
146 return DECL_RTL (label
);
149 /* As above, but also put it on the forced-reference list of the
150 function that contains it. */
152 force_label_rtx (tree label
)
154 rtx ref
= label_rtx (label
);
155 tree function
= decl_function_context (label
);
157 gcc_assert (function
);
159 forced_labels
= gen_rtx_EXPR_LIST (VOIDmode
, ref
, forced_labels
);
163 /* Add an unconditional jump to LABEL as the next sequential instruction. */
166 emit_jump (rtx label
)
168 do_pending_stack_adjust ();
169 emit_jump_insn (gen_jump (label
));
173 /* Emit code to jump to the address
174 specified by the pointer expression EXP. */
177 expand_computed_goto (tree exp
)
179 rtx x
= expand_normal (exp
);
181 x
= convert_memory_address (Pmode
, x
);
183 do_pending_stack_adjust ();
184 emit_indirect_jump (x
);
187 /* Handle goto statements and the labels that they can go to. */
189 /* Specify the location in the RTL code of a label LABEL,
190 which is a LABEL_DECL tree node.
192 This is used for the kind of label that the user can jump to with a
193 goto statement, and for alternatives of a switch or case statement.
194 RTL labels generated for loops and conditionals don't go through here;
195 they are generated directly at the RTL level, by other functions below.
197 Note that this has nothing to do with defining label *names*.
198 Languages vary in how they do that and what that even means. */
201 expand_label (tree label
)
203 rtx label_r
= label_rtx (label
);
205 do_pending_stack_adjust ();
206 emit_label (label_r
);
207 if (DECL_NAME (label
))
208 LABEL_NAME (DECL_RTL (label
)) = IDENTIFIER_POINTER (DECL_NAME (label
));
210 if (DECL_NONLOCAL (label
))
212 expand_nl_goto_receiver ();
213 nonlocal_goto_handler_labels
214 = gen_rtx_EXPR_LIST (VOIDmode
, label_r
,
215 nonlocal_goto_handler_labels
);
218 if (FORCED_LABEL (label
))
219 forced_labels
= gen_rtx_EXPR_LIST (VOIDmode
, label_r
, forced_labels
);
221 if (DECL_NONLOCAL (label
) || FORCED_LABEL (label
))
222 maybe_set_first_label_num (label_r
);
225 /* Generate RTL code for a `goto' statement with target label LABEL.
226 LABEL should be a LABEL_DECL tree node that was or will later be
227 defined with `expand_label'. */
230 expand_goto (tree label
)
232 #ifdef ENABLE_CHECKING
233 /* Check for a nonlocal goto to a containing function. Should have
234 gotten translated to __builtin_nonlocal_goto. */
235 tree context
= decl_function_context (label
);
236 gcc_assert (!context
|| context
== current_function_decl
);
239 emit_jump (label_rtx (label
));
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int n = 0;

  while (*s)
    n += (*s++ == c);

  return n;
}
252 /* Generate RTL for an asm statement (explicit assembler code).
253 STRING is a STRING_CST node containing the assembler code text,
254 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
255 insn is volatile; don't optimize it. */
258 expand_asm_loc (tree string
, int vol
, location_t locus
)
262 if (TREE_CODE (string
) == ADDR_EXPR
)
263 string
= TREE_OPERAND (string
, 0);
265 body
= gen_rtx_ASM_INPUT_loc (VOIDmode
,
266 ggc_strdup (TREE_STRING_POINTER (string
)),
269 MEM_VOLATILE_P (body
) = vol
;
274 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
275 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
276 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
277 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
278 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
279 constraint allows the use of a register operand. And, *IS_INOUT
280 will be true if the operand is read-write, i.e., if it is used as
281 an input as well as an output. If *CONSTRAINT_P is not in
282 canonical form, it will be made canonical. (Note that `+' will be
283 replaced with `=' as part of this process.)
285 Returns TRUE if all went well; FALSE if an error occurred. */
288 parse_output_constraint (const char **constraint_p
, int operand_num
,
289 int ninputs
, int noutputs
, bool *allows_mem
,
290 bool *allows_reg
, bool *is_inout
)
292 const char *constraint
= *constraint_p
;
295 /* Assume the constraint doesn't allow the use of either a register
300 /* Allow the `=' or `+' to not be at the beginning of the string,
301 since it wasn't explicitly documented that way, and there is a
302 large body of code that puts it last. Swap the character to
303 the front, so as not to uglify any place else. */
304 p
= strchr (constraint
, '=');
306 p
= strchr (constraint
, '+');
308 /* If the string doesn't contain an `=', issue an error
312 error ("output operand constraint lacks %<=%>");
316 /* If the constraint begins with `+', then the operand is both read
317 from and written to. */
318 *is_inout
= (*p
== '+');
320 /* Canonicalize the output constraint so that it begins with `='. */
321 if (p
!= constraint
|| *is_inout
)
324 size_t c_len
= strlen (constraint
);
327 warning (0, "output constraint %qc for operand %d "
328 "is not at the beginning",
331 /* Make a copy of the constraint. */
332 buf
= XALLOCAVEC (char, c_len
+ 1);
333 strcpy (buf
, constraint
);
334 /* Swap the first character and the `=' or `+'. */
335 buf
[p
- constraint
] = buf
[0];
336 /* Make sure the first character is an `='. (Until we do this,
337 it might be a `+'.) */
339 /* Replace the constraint with the canonicalized string. */
340 *constraint_p
= ggc_alloc_string (buf
, c_len
);
341 constraint
= *constraint_p
;
344 /* Loop through the constraint string. */
345 for (p
= constraint
+ 1; *p
; p
+= CONSTRAINT_LEN (*p
, p
))
350 error ("operand constraint contains incorrectly positioned "
355 if (operand_num
+ 1 == ninputs
+ noutputs
)
357 error ("%<%%%> constraint used with last operand");
362 case 'V': case TARGET_MEM_CONSTRAINT
: case 'o':
366 case '?': case '!': case '*': case '&': case '#':
367 case 'E': case 'F': case 'G': case 'H':
368 case 's': case 'i': case 'n':
369 case 'I': case 'J': case 'K': case 'L': case 'M':
370 case 'N': case 'O': case 'P': case ',':
373 case '0': case '1': case '2': case '3': case '4':
374 case '5': case '6': case '7': case '8': case '9':
376 error ("matching constraint not valid in output operand");
380 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
381 excepting those that expand_call created. So match memory
398 if (REG_CLASS_FROM_CONSTRAINT (*p
, p
) != NO_REGS
)
400 #ifdef EXTRA_CONSTRAINT_STR
401 else if (EXTRA_ADDRESS_CONSTRAINT (*p
, p
))
403 else if (EXTRA_MEMORY_CONSTRAINT (*p
, p
))
407 /* Otherwise we can't assume anything about the nature of
408 the constraint except that it isn't purely registers.
409 Treat it like "g" and hope for the best. */
420 /* Similar, but for input constraints. */
423 parse_input_constraint (const char **constraint_p
, int input_num
,
424 int ninputs
, int noutputs
, int ninout
,
425 const char * const * constraints
,
426 bool *allows_mem
, bool *allows_reg
)
428 const char *constraint
= *constraint_p
;
429 const char *orig_constraint
= constraint
;
430 size_t c_len
= strlen (constraint
);
432 bool saw_match
= false;
434 /* Assume the constraint doesn't allow the use of either
435 a register or memory. */
439 /* Make sure constraint has neither `=', `+', nor '&'. */
441 for (j
= 0; j
< c_len
; j
+= CONSTRAINT_LEN (constraint
[j
], constraint
+j
))
442 switch (constraint
[j
])
444 case '+': case '=': case '&':
445 if (constraint
== orig_constraint
)
447 error ("input operand constraint contains %qc", constraint
[j
]);
453 if (constraint
== orig_constraint
454 && input_num
+ 1 == ninputs
- ninout
)
456 error ("%<%%%> constraint used with last operand");
461 case 'V': case TARGET_MEM_CONSTRAINT
: case 'o':
466 case '?': case '!': case '*': case '#':
467 case 'E': case 'F': case 'G': case 'H':
468 case 's': case 'i': case 'n':
469 case 'I': case 'J': case 'K': case 'L': case 'M':
470 case 'N': case 'O': case 'P': case ',':
473 /* Whether or not a numeric constraint allows a register is
474 decided by the matching constraint, and so there is no need
475 to do anything special with them. We must handle them in
476 the default case, so that we don't unnecessarily force
477 operands to memory. */
478 case '0': case '1': case '2': case '3': case '4':
479 case '5': case '6': case '7': case '8': case '9':
486 match
= strtoul (constraint
+ j
, &end
, 10);
487 if (match
>= (unsigned long) noutputs
)
489 error ("matching constraint references invalid operand number");
493 /* Try and find the real constraint for this dup. Only do this
494 if the matching constraint is the only alternative. */
496 && (j
== 0 || (j
== 1 && constraint
[0] == '%')))
498 constraint
= constraints
[match
];
499 *constraint_p
= constraint
;
500 c_len
= strlen (constraint
);
502 /* ??? At the end of the loop, we will skip the first part of
503 the matched constraint. This assumes not only that the
504 other constraint is an output constraint, but also that
505 the '=' or '+' come first. */
509 j
= end
- constraint
;
510 /* Anticipate increment at end of loop. */
525 if (! ISALPHA (constraint
[j
]))
527 error ("invalid punctuation %qc in constraint", constraint
[j
]);
530 if (REG_CLASS_FROM_CONSTRAINT (constraint
[j
], constraint
+ j
)
533 #ifdef EXTRA_CONSTRAINT_STR
534 else if (EXTRA_ADDRESS_CONSTRAINT (constraint
[j
], constraint
+ j
))
536 else if (EXTRA_MEMORY_CONSTRAINT (constraint
[j
], constraint
+ j
))
540 /* Otherwise we can't assume anything about the nature of
541 the constraint except that it isn't purely registers.
542 Treat it like "g" and hope for the best. */
550 if (saw_match
&& !*allows_reg
)
551 warning (0, "matching constraint does not allow a register");
556 /* Return DECL iff there's an overlap between *REGS and DECL, where DECL
557 can be an asm-declared register. Called via walk_tree. */
560 decl_overlaps_hard_reg_set_p (tree
*declp
, int *walk_subtrees ATTRIBUTE_UNUSED
,
564 const HARD_REG_SET
*const regs
= (const HARD_REG_SET
*) data
;
566 if (TREE_CODE (decl
) == VAR_DECL
)
568 if (DECL_HARD_REGISTER (decl
)
569 && REG_P (DECL_RTL (decl
))
570 && REGNO (DECL_RTL (decl
)) < FIRST_PSEUDO_REGISTER
)
572 rtx reg
= DECL_RTL (decl
);
574 if (overlaps_hard_reg_set_p (*regs
, GET_MODE (reg
), REGNO (reg
)))
579 else if (TYPE_P (decl
) || TREE_CODE (decl
) == PARM_DECL
)
584 /* If there is an overlap between *REGS and DECL, return the first overlap
587 tree_overlaps_hard_reg_set (tree decl
, HARD_REG_SET
*regs
)
589 return walk_tree (&decl
, decl_overlaps_hard_reg_set_p
, regs
, NULL
);
592 /* Check for overlap between registers marked in CLOBBERED_REGS and
593 anything inappropriate in T. Emit error and return the register
594 variable definition for error, NULL_TREE for ok. */
597 tree_conflicts_with_clobbers_p (tree t
, HARD_REG_SET
*clobbered_regs
)
599 /* Conflicts between asm-declared register variables and the clobber
600 list are not allowed. */
601 tree overlap
= tree_overlaps_hard_reg_set (t
, clobbered_regs
);
605 error ("asm-specifier for variable %qE conflicts with asm clobber list",
606 DECL_NAME (overlap
));
608 /* Reset registerness to stop multiple errors emitted for a single
610 DECL_REGISTER (overlap
) = 0;
617 /* Generate RTL for an asm statement with arguments.
618 STRING is the instruction template.
619 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
620 Each output or input has an expression in the TREE_VALUE and
621 a tree list in TREE_PURPOSE which in turn contains a constraint
622 name in TREE_VALUE (or NULL_TREE) and a constraint string
624 CLOBBERS is a list of STRING_CST nodes each naming a hard register
625 that is clobbered by this insn.
627 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
628 Some elements of OUTPUTS may be replaced with trees representing temporary
629 values. The caller should copy those temporary values to the originally
632 VOL nonzero means the insn is volatile; don't optimize it. */
635 expand_asm_operands (tree string
, tree outputs
, tree inputs
,
636 tree clobbers
, tree labels
, int vol
, location_t locus
)
638 rtvec argvec
, constraintvec
, labelvec
;
640 int ninputs
= list_length (inputs
);
641 int noutputs
= list_length (outputs
);
642 int nlabels
= list_length (labels
);
645 HARD_REG_SET clobbered_regs
;
646 int clobber_conflict_found
= 0;
650 /* Vector of RTX's of evaluated output operands. */
651 rtx
*output_rtx
= XALLOCAVEC (rtx
, noutputs
);
652 int *inout_opnum
= XALLOCAVEC (int, noutputs
);
653 rtx
*real_output_rtx
= XALLOCAVEC (rtx
, noutputs
);
654 enum machine_mode
*inout_mode
= XALLOCAVEC (enum machine_mode
, noutputs
);
655 const char **constraints
= XALLOCAVEC (const char *, noutputs
+ ninputs
);
656 int old_generating_concat_p
= generating_concat_p
;
658 /* An ASM with no outputs needs to be treated as volatile, for now. */
662 if (! check_operand_nalternatives (outputs
, inputs
))
665 string
= resolve_asm_operand_names (string
, outputs
, inputs
, labels
);
667 /* Collect constraints. */
669 for (t
= outputs
; t
; t
= TREE_CHAIN (t
), i
++)
670 constraints
[i
] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
671 for (t
= inputs
; t
; t
= TREE_CHAIN (t
), i
++)
672 constraints
[i
] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
674 /* Sometimes we wish to automatically clobber registers across an asm.
675 Case in point is when the i386 backend moved from cc0 to a hard reg --
676 maintaining source-level compatibility means automatically clobbering
677 the flags register. */
678 clobbers
= targetm
.md_asm_clobbers (outputs
, inputs
, clobbers
);
680 /* Count the number of meaningful clobbered registers, ignoring what
681 we would ignore later. */
683 CLEAR_HARD_REG_SET (clobbered_regs
);
684 for (tail
= clobbers
; tail
; tail
= TREE_CHAIN (tail
))
688 if (TREE_VALUE (tail
) == error_mark_node
)
690 regname
= TREE_STRING_POINTER (TREE_VALUE (tail
));
692 i
= decode_reg_name (regname
);
693 if (i
>= 0 || i
== -4)
696 error ("unknown register name %qs in %<asm%>", regname
);
698 /* Mark clobbered registers. */
701 /* Clobbering the PIC register is an error. */
702 if (i
== (int) PIC_OFFSET_TABLE_REGNUM
)
704 error ("PIC register %qs clobbered in %<asm%>", regname
);
708 SET_HARD_REG_BIT (clobbered_regs
, i
);
712 /* First pass over inputs and outputs checks validity and sets
713 mark_addressable if needed. */
716 for (i
= 0, tail
= outputs
; tail
; tail
= TREE_CHAIN (tail
), i
++)
718 tree val
= TREE_VALUE (tail
);
719 tree type
= TREE_TYPE (val
);
720 const char *constraint
;
725 /* If there's an erroneous arg, emit no insn. */
726 if (type
== error_mark_node
)
729 /* Try to parse the output constraint. If that fails, there's
730 no point in going further. */
731 constraint
= constraints
[i
];
732 if (!parse_output_constraint (&constraint
, i
, ninputs
, noutputs
,
733 &allows_mem
, &allows_reg
, &is_inout
))
740 && REG_P (DECL_RTL (val
))
741 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
))))
742 mark_addressable (val
);
749 if (ninputs
+ noutputs
> MAX_RECOG_OPERANDS
)
751 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS
);
755 for (i
= 0, tail
= inputs
; tail
; i
++, tail
= TREE_CHAIN (tail
))
757 bool allows_reg
, allows_mem
;
758 const char *constraint
;
760 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
761 would get VOIDmode and that could cause a crash in reload. */
762 if (TREE_TYPE (TREE_VALUE (tail
)) == error_mark_node
)
765 constraint
= constraints
[i
+ noutputs
];
766 if (! parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, ninout
,
767 constraints
, &allows_mem
, &allows_reg
))
770 if (! allows_reg
&& allows_mem
)
771 mark_addressable (TREE_VALUE (tail
));
774 /* Second pass evaluates arguments. */
777 for (i
= 0, tail
= outputs
; tail
; tail
= TREE_CHAIN (tail
), i
++)
779 tree val
= TREE_VALUE (tail
);
780 tree type
= TREE_TYPE (val
);
787 ok
= parse_output_constraint (&constraints
[i
], i
, ninputs
,
788 noutputs
, &allows_mem
, &allows_reg
,
792 /* If an output operand is not a decl or indirect ref and our constraint
793 allows a register, make a temporary to act as an intermediate.
794 Make the asm insn write into that, then our caller will copy it to
795 the real output operand. Likewise for promoted variables. */
797 generating_concat_p
= 0;
799 real_output_rtx
[i
] = NULL_RTX
;
800 if ((TREE_CODE (val
) == INDIRECT_REF
803 && (allows_mem
|| REG_P (DECL_RTL (val
)))
804 && ! (REG_P (DECL_RTL (val
))
805 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
)))
809 op
= expand_expr (val
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
811 op
= validize_mem (op
);
813 if (! allows_reg
&& !MEM_P (op
))
814 error ("output number %d not directly addressable", i
);
815 if ((! allows_mem
&& MEM_P (op
))
816 || GET_CODE (op
) == CONCAT
)
818 real_output_rtx
[i
] = op
;
819 op
= gen_reg_rtx (GET_MODE (op
));
821 emit_move_insn (op
, real_output_rtx
[i
]);
826 op
= assign_temp (type
, 0, 0, 1);
827 op
= validize_mem (op
);
828 if (!MEM_P (op
) && TREE_CODE (TREE_VALUE (tail
)) == SSA_NAME
)
829 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail
)), op
);
830 TREE_VALUE (tail
) = make_tree (type
, op
);
834 generating_concat_p
= old_generating_concat_p
;
838 inout_mode
[ninout
] = TYPE_MODE (type
);
839 inout_opnum
[ninout
++] = i
;
842 if (tree_conflicts_with_clobbers_p (val
, &clobbered_regs
))
843 clobber_conflict_found
= 1;
846 /* Make vectors for the expression-rtx, constraint strings,
847 and named operands. */
849 argvec
= rtvec_alloc (ninputs
);
850 constraintvec
= rtvec_alloc (ninputs
);
851 labelvec
= rtvec_alloc (nlabels
);
853 body
= gen_rtx_ASM_OPERANDS ((noutputs
== 0 ? VOIDmode
854 : GET_MODE (output_rtx
[0])),
855 ggc_strdup (TREE_STRING_POINTER (string
)),
856 empty_string
, 0, argvec
, constraintvec
,
859 MEM_VOLATILE_P (body
) = vol
;
861 /* Eval the inputs and put them into ARGVEC.
862 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
864 for (i
= 0, tail
= inputs
; tail
; tail
= TREE_CHAIN (tail
), ++i
)
866 bool allows_reg
, allows_mem
;
867 const char *constraint
;
872 constraint
= constraints
[i
+ noutputs
];
873 ok
= parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, ninout
,
874 constraints
, &allows_mem
, &allows_reg
);
877 generating_concat_p
= 0;
879 val
= TREE_VALUE (tail
);
880 type
= TREE_TYPE (val
);
881 /* EXPAND_INITIALIZER will not generate code for valid initializer
882 constants, but will still generate code for other types of operand.
883 This is the behavior we want for constant constraints. */
884 op
= expand_expr (val
, NULL_RTX
, VOIDmode
,
885 allows_reg
? EXPAND_NORMAL
886 : allows_mem
? EXPAND_MEMORY
887 : EXPAND_INITIALIZER
);
889 /* Never pass a CONCAT to an ASM. */
890 if (GET_CODE (op
) == CONCAT
)
891 op
= force_reg (GET_MODE (op
), op
);
893 op
= validize_mem (op
);
895 if (asm_operand_ok (op
, constraint
, NULL
) <= 0)
897 if (allows_reg
&& TYPE_MODE (type
) != BLKmode
)
898 op
= force_reg (TYPE_MODE (type
), op
);
899 else if (!allows_mem
)
900 warning (0, "asm operand %d probably doesn%'t match constraints",
904 /* We won't recognize either volatile memory or memory
905 with a queued address as available a memory_operand
906 at this point. Ignore it: clearly this *is* a memory. */
910 warning (0, "use of memory input without lvalue in "
911 "asm operand %d is deprecated", i
+ noutputs
);
915 rtx mem
= force_const_mem (TYPE_MODE (type
), op
);
917 op
= validize_mem (mem
);
919 op
= force_reg (TYPE_MODE (type
), op
);
922 || GET_CODE (op
) == SUBREG
923 || GET_CODE (op
) == CONCAT
)
925 tree qual_type
= build_qualified_type (type
,
928 rtx memloc
= assign_temp (qual_type
, 1, 1, 1);
929 memloc
= validize_mem (memloc
);
930 emit_move_insn (memloc
, op
);
936 generating_concat_p
= old_generating_concat_p
;
937 ASM_OPERANDS_INPUT (body
, i
) = op
;
939 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body
, i
)
940 = gen_rtx_ASM_INPUT (TYPE_MODE (type
),
941 ggc_strdup (constraints
[i
+ noutputs
]));
943 if (tree_conflicts_with_clobbers_p (val
, &clobbered_regs
))
944 clobber_conflict_found
= 1;
947 /* Protect all the operands from the queue now that they have all been
950 generating_concat_p
= 0;
952 /* For in-out operands, copy output rtx to input rtx. */
953 for (i
= 0; i
< ninout
; i
++)
955 int j
= inout_opnum
[i
];
958 ASM_OPERANDS_INPUT (body
, ninputs
- ninout
+ i
)
961 sprintf (buffer
, "%d", j
);
962 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body
, ninputs
- ninout
+ i
)
963 = gen_rtx_ASM_INPUT (inout_mode
[i
], ggc_strdup (buffer
));
966 /* Copy labels to the vector. */
967 for (i
= 0, tail
= labels
; i
< nlabels
; ++i
, tail
= TREE_CHAIN (tail
))
968 ASM_OPERANDS_LABEL (body
, i
)
969 = gen_rtx_LABEL_REF (Pmode
, label_rtx (TREE_VALUE (tail
)));
971 generating_concat_p
= old_generating_concat_p
;
973 /* Now, for each output, construct an rtx
974 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
975 ARGVEC CONSTRAINTS OPNAMES))
976 If there is more than one, put them inside a PARALLEL. */
978 if (nlabels
> 0 && nclobbers
== 0)
980 gcc_assert (noutputs
== 0);
981 emit_jump_insn (body
);
983 else if (noutputs
== 0 && nclobbers
== 0)
985 /* No output operands: put in a raw ASM_OPERANDS rtx. */
988 else if (noutputs
== 1 && nclobbers
== 0)
990 ASM_OPERANDS_OUTPUT_CONSTRAINT (body
) = ggc_strdup (constraints
[0]);
991 emit_insn (gen_rtx_SET (VOIDmode
, output_rtx
[0], body
));
1001 body
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (num
+ nclobbers
));
1003 /* For each output operand, store a SET. */
1004 for (i
= 0, tail
= outputs
; tail
; tail
= TREE_CHAIN (tail
), i
++)
1006 XVECEXP (body
, 0, i
)
1007 = gen_rtx_SET (VOIDmode
,
1009 gen_rtx_ASM_OPERANDS
1010 (GET_MODE (output_rtx
[i
]),
1011 ggc_strdup (TREE_STRING_POINTER (string
)),
1012 ggc_strdup (constraints
[i
]),
1013 i
, argvec
, constraintvec
, labelvec
, locus
));
1015 MEM_VOLATILE_P (SET_SRC (XVECEXP (body
, 0, i
))) = vol
;
1018 /* If there are no outputs (but there are some clobbers)
1019 store the bare ASM_OPERANDS into the PARALLEL. */
1022 XVECEXP (body
, 0, i
++) = obody
;
1024 /* Store (clobber REG) for each clobbered register specified. */
1026 for (tail
= clobbers
; tail
; tail
= TREE_CHAIN (tail
))
1028 const char *regname
= TREE_STRING_POINTER (TREE_VALUE (tail
));
1029 int j
= decode_reg_name (regname
);
1034 if (j
== -3) /* `cc', which is not a register */
1037 if (j
== -4) /* `memory', don't cache memory across asm */
1039 XVECEXP (body
, 0, i
++)
1040 = gen_rtx_CLOBBER (VOIDmode
,
1043 gen_rtx_SCRATCH (VOIDmode
)));
1047 /* Ignore unknown register, error already signaled. */
1051 /* Use QImode since that's guaranteed to clobber just one reg. */
1052 clobbered_reg
= gen_rtx_REG (QImode
, j
);
1054 /* Do sanity check for overlap between clobbers and respectively
1055 input and outputs that hasn't been handled. Such overlap
1056 should have been detected and reported above. */
1057 if (!clobber_conflict_found
)
1061 /* We test the old body (obody) contents to avoid tripping
1062 over the under-construction body. */
1063 for (opno
= 0; opno
< noutputs
; opno
++)
1064 if (reg_overlap_mentioned_p (clobbered_reg
, output_rtx
[opno
]))
1065 internal_error ("asm clobber conflict with output operand");
1067 for (opno
= 0; opno
< ninputs
- ninout
; opno
++)
1068 if (reg_overlap_mentioned_p (clobbered_reg
,
1069 ASM_OPERANDS_INPUT (obody
, opno
)))
1070 internal_error ("asm clobber conflict with input operand");
1073 XVECEXP (body
, 0, i
++)
1074 = gen_rtx_CLOBBER (VOIDmode
, clobbered_reg
);
1078 emit_jump_insn (body
);
1083 /* For any outputs that needed reloading into registers, spill them
1084 back to where they belong. */
1085 for (i
= 0; i
< noutputs
; ++i
)
1086 if (real_output_rtx
[i
])
1087 emit_move_insn (real_output_rtx
[i
], output_rtx
[i
]);
1089 crtl
->has_asm_statement
= 1;
1094 expand_asm_stmt (gimple stmt
)
1097 tree outputs
, tail
, t
;
1101 tree str
, out
, in
, cl
, labels
;
1102 location_t locus
= gimple_location (stmt
);
1104 /* Meh... convert the gimple asm operands into real tree lists.
1105 Eventually we should make all routines work on the vectors instead
1106 of relying on TREE_CHAIN. */
1108 n
= gimple_asm_noutputs (stmt
);
1111 t
= out
= gimple_asm_output_op (stmt
, 0);
1112 for (i
= 1; i
< n
; i
++)
1113 t
= TREE_CHAIN (t
) = gimple_asm_output_op (stmt
, i
);
1117 n
= gimple_asm_ninputs (stmt
);
1120 t
= in
= gimple_asm_input_op (stmt
, 0);
1121 for (i
= 1; i
< n
; i
++)
1122 t
= TREE_CHAIN (t
) = gimple_asm_input_op (stmt
, i
);
1126 n
= gimple_asm_nclobbers (stmt
);
1129 t
= cl
= gimple_asm_clobber_op (stmt
, 0);
1130 for (i
= 1; i
< n
; i
++)
1131 t
= TREE_CHAIN (t
) = gimple_asm_clobber_op (stmt
, i
);
1135 n
= gimple_asm_nlabels (stmt
);
1138 t
= labels
= gimple_asm_label_op (stmt
, 0);
1139 for (i
= 1; i
< n
; i
++)
1140 t
= TREE_CHAIN (t
) = gimple_asm_label_op (stmt
, i
);
1143 s
= gimple_asm_string (stmt
);
1144 str
= build_string (strlen (s
), s
);
1146 if (gimple_asm_input_p (stmt
))
1148 expand_asm_loc (str
, gimple_asm_volatile_p (stmt
), locus
);
1153 noutputs
= gimple_asm_noutputs (stmt
);
1154 /* o[I] is the place that output number I should be written. */
1155 o
= (tree
*) alloca (noutputs
* sizeof (tree
));
1157 /* Record the contents of OUTPUTS before it is modified. */
1158 for (i
= 0, tail
= outputs
; tail
; tail
= TREE_CHAIN (tail
), i
++)
1159 o
[i
] = TREE_VALUE (tail
);
1161 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1162 OUTPUTS some trees for where the values were actually stored. */
1163 expand_asm_operands (str
, outputs
, in
, cl
, labels
,
1164 gimple_asm_volatile_p (stmt
), locus
);
1166 /* Copy all the intermediate outputs into the specified outputs. */
1167 for (i
= 0, tail
= outputs
; tail
; tail
= TREE_CHAIN (tail
), i
++)
1169 if (o
[i
] != TREE_VALUE (tail
))
1171 expand_assignment (o
[i
], TREE_VALUE (tail
), false);
1174 /* Restore the original value so that it's correct the next
1175 time we expand this function. */
1176 TREE_VALUE (tail
) = o
[i
];
1181 /* A subroutine of expand_asm_operands. Check that all operands have
1182 the same number of alternatives. Return true if so. */
1185 check_operand_nalternatives (tree outputs
, tree inputs
)
1187 if (outputs
|| inputs
)
1189 tree tmp
= TREE_PURPOSE (outputs
? outputs
: inputs
);
1191 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp
)));
1194 if (nalternatives
+ 1 > MAX_RECOG_ALTERNATIVES
)
1196 error ("too many alternatives in %<asm%>");
1203 const char *constraint
1204 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp
)));
1206 if (n_occurrences (',', constraint
) != nalternatives
)
1208 error ("operand constraints for %<asm%> differ "
1209 "in number of alternatives");
1213 if (TREE_CHAIN (tmp
))
1214 tmp
= TREE_CHAIN (tmp
);
1216 tmp
= next
, next
= 0;
1223 /* A subroutine of expand_asm_operands. Check that all operand names
1224 are unique. Return true if so. We rely on the fact that these names
1225 are identifiers, and so have been canonicalized by get_identifier,
1226 so all we need are pointer comparisons. */
1229 check_unique_operand_names (tree outputs
, tree inputs
, tree labels
)
1233 for (i
= outputs
; i
; i
= TREE_CHAIN (i
))
1235 tree i_name
= TREE_PURPOSE (TREE_PURPOSE (i
));
1239 for (j
= TREE_CHAIN (i
); j
; j
= TREE_CHAIN (j
))
1240 if (simple_cst_equal (i_name
, TREE_PURPOSE (TREE_PURPOSE (j
))))
1244 for (i
= inputs
; i
; i
= TREE_CHAIN (i
))
1246 tree i_name
= TREE_PURPOSE (TREE_PURPOSE (i
));
1250 for (j
= TREE_CHAIN (i
); j
; j
= TREE_CHAIN (j
))
1251 if (simple_cst_equal (i_name
, TREE_PURPOSE (TREE_PURPOSE (j
))))
1253 for (j
= outputs
; j
; j
= TREE_CHAIN (j
))
1254 if (simple_cst_equal (i_name
, TREE_PURPOSE (TREE_PURPOSE (j
))))
1258 for (i
= labels
; i
; i
= TREE_CHAIN (i
))
1260 tree i_name
= TREE_PURPOSE (i
);
1264 for (j
= TREE_CHAIN (i
); j
; j
= TREE_CHAIN (j
))
1265 if (simple_cst_equal (i_name
, TREE_PURPOSE (j
)))
1267 for (j
= inputs
; j
; j
= TREE_CHAIN (j
))
1268 if (simple_cst_equal (i_name
, TREE_PURPOSE (TREE_PURPOSE (j
))))
1275 error ("duplicate asm operand name %qs",
1276 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i
))));
1280 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1281 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1282 STRING and in the constraints to those numbers. */
1285 resolve_asm_operand_names (tree string
, tree outputs
, tree inputs
, tree labels
)
1292 check_unique_operand_names (outputs
, inputs
, labels
);
1294 /* Substitute [<name>] in input constraint strings. There should be no
1295 named operands in output constraints. */
1296 for (t
= inputs
; t
; t
= TREE_CHAIN (t
))
1298 c
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
1299 if (strchr (c
, '[') != NULL
)
1301 p
= buffer
= xstrdup (c
);
1302 while ((p
= strchr (p
, '[')) != NULL
)
1303 p
= resolve_operand_name_1 (p
, outputs
, inputs
, NULL
);
1304 TREE_VALUE (TREE_PURPOSE (t
))
1305 = build_string (strlen (buffer
), buffer
);
1310 /* Now check for any needed substitutions in the template. */
1311 c
= TREE_STRING_POINTER (string
);
1312 while ((c
= strchr (c
, '%')) != NULL
)
1316 else if (ISALPHA (c
[1]) && c
[2] == '[')
1327 /* OK, we need to make a copy so we can perform the substitutions.
1328 Assume that we will not need extra space--we get to remove '['
1329 and ']', which means we cannot have a problem until we have more
1330 than 999 operands. */
1331 buffer
= xstrdup (TREE_STRING_POINTER (string
));
1332 p
= buffer
+ (c
- TREE_STRING_POINTER (string
));
1334 while ((p
= strchr (p
, '%')) != NULL
)
1338 else if (ISALPHA (p
[1]) && p
[2] == '[')
1346 p
= resolve_operand_name_1 (p
, outputs
, inputs
, labels
);
1349 string
= build_string (strlen (buffer
), buffer
);
1356 /* A subroutine of resolve_operand_names. P points to the '[' for a
1357 potential named operand of the form [<name>]. In place, replace
1358 the name and brackets with a number. Return a pointer to the
1359 balance of the string after substitution. */
1362 resolve_operand_name_1 (char *p
, tree outputs
, tree inputs
, tree labels
)
1368 /* Collect the operand name. */
1369 q
= strchr (++p
, ']');
1372 error ("missing close brace for named operand");
1373 return strchr (p
, '\0');
1377 /* Resolve the name to a number. */
1378 for (op
= 0, t
= outputs
; t
; t
= TREE_CHAIN (t
), op
++)
1380 tree name
= TREE_PURPOSE (TREE_PURPOSE (t
));
1381 if (name
&& strcmp (TREE_STRING_POINTER (name
), p
) == 0)
1384 for (t
= inputs
; t
; t
= TREE_CHAIN (t
), op
++)
1386 tree name
= TREE_PURPOSE (TREE_PURPOSE (t
));
1387 if (name
&& strcmp (TREE_STRING_POINTER (name
), p
) == 0)
1390 for (t
= labels
; t
; t
= TREE_CHAIN (t
), op
++)
1392 tree name
= TREE_PURPOSE (t
);
1393 if (name
&& strcmp (TREE_STRING_POINTER (name
), p
) == 0)
1397 error ("undefined named operand %qs", identifier_to_locale (p
));
1401 /* Replace the name with the number. Unfortunately, not all libraries
1402 get the return value of sprintf correct, so search for the end of the
1403 generated string by hand. */
1404 sprintf (--p
, "%d", op
);
1405 p
= strchr (p
, '\0');
1407 /* Verify the no extra buffer space assumption. */
1408 gcc_assert (p
<= q
);
1410 /* Shift the rest of the buffer down to fill the gap. */
1411 memmove (p
, q
+ 1, strlen (q
+ 1) + 1);
1416 /* Generate RTL to evaluate the expression EXP. */
1419 expand_expr_stmt (tree exp
)
1424 value
= expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
1425 type
= TREE_TYPE (exp
);
1427 /* If all we do is reference a volatile value in memory,
1428 copy it to a register to be sure it is actually touched. */
1429 if (value
&& MEM_P (value
) && TREE_THIS_VOLATILE (exp
))
1431 if (TYPE_MODE (type
) == VOIDmode
)
1433 else if (TYPE_MODE (type
) != BLKmode
)
1434 value
= copy_to_reg (value
);
1437 rtx lab
= gen_label_rtx ();
1439 /* Compare the value with itself to reference it. */
1440 emit_cmp_and_jump_insns (value
, value
, EQ
,
1441 expand_normal (TYPE_SIZE (type
)),
1447 /* Free any temporaries used to evaluate this expression. */
1451 /* Warn if EXP contains any computations whose results are not used.
1452 Return 1 if a warning is printed; 0 otherwise. LOCUS is the
1453 (potential) location of the expression. */
1456 warn_if_unused_value (const_tree exp
, location_t locus
)
1459 if (TREE_USED (exp
) || TREE_NO_WARNING (exp
))
1462 /* Don't warn about void constructs. This includes casting to void,
1463 void function calls, and statement expressions with a final cast
1465 if (VOID_TYPE_P (TREE_TYPE (exp
)))
1468 if (EXPR_HAS_LOCATION (exp
))
1469 locus
= EXPR_LOCATION (exp
);
1471 switch (TREE_CODE (exp
))
1473 case PREINCREMENT_EXPR
:
1474 case POSTINCREMENT_EXPR
:
1475 case PREDECREMENT_EXPR
:
1476 case POSTDECREMENT_EXPR
:
1481 case TRY_CATCH_EXPR
:
1482 case WITH_CLEANUP_EXPR
:
1488 /* For a binding, warn if no side effect within it. */
1489 exp
= BIND_EXPR_BODY (exp
);
1493 case NON_LVALUE_EXPR
:
1494 exp
= TREE_OPERAND (exp
, 0);
1497 case TRUTH_ORIF_EXPR
:
1498 case TRUTH_ANDIF_EXPR
:
1499 /* In && or ||, warn if 2nd operand has no side effect. */
1500 exp
= TREE_OPERAND (exp
, 1);
1504 if (warn_if_unused_value (TREE_OPERAND (exp
, 0), locus
))
1506 /* Let people do `(foo (), 0)' without a warning. */
1507 if (TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
1509 exp
= TREE_OPERAND (exp
, 1);
1513 /* If this is an expression with side effects, don't warn; this
1514 case commonly appears in macro expansions. */
1515 if (TREE_SIDE_EFFECTS (exp
))
1520 /* Don't warn about automatic dereferencing of references, since
1521 the user cannot control it. */
1522 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == REFERENCE_TYPE
)
1524 exp
= TREE_OPERAND (exp
, 0);
1530 /* Referencing a volatile value is a side effect, so don't warn. */
1531 if ((DECL_P (exp
) || REFERENCE_CLASS_P (exp
))
1532 && TREE_THIS_VOLATILE (exp
))
1535 /* If this is an expression which has no operands, there is no value
1536 to be unused. There are no such language-independent codes,
1537 but front ends may define such. */
1538 if (EXPRESSION_CLASS_P (exp
) && TREE_OPERAND_LENGTH (exp
) == 0)
1542 warning_at (locus
, OPT_Wunused_value
, "value computed is not used");
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}
1562 /* Generate RTL to return directly from the current function.
1563 (That is, we bypass any return value.) */
1566 expand_naked_return (void)
1570 clear_pending_stack_adjust ();
1571 do_pending_stack_adjust ();
1573 end_label
= naked_return_label
;
1575 end_label
= naked_return_label
= gen_label_rtx ();
1577 emit_jump (end_label
);
1580 /* Generate RTL to return from the current function, with value VAL. */
1583 expand_value_return (rtx val
)
1585 /* Copy the value to the return location unless it's already there. */
1587 tree decl
= DECL_RESULT (current_function_decl
);
1588 rtx return_reg
= DECL_RTL (decl
);
1589 if (return_reg
!= val
)
1591 tree funtype
= TREE_TYPE (current_function_decl
);
1592 tree type
= TREE_TYPE (decl
);
1593 int unsignedp
= TYPE_UNSIGNED (type
);
1594 enum machine_mode old_mode
= DECL_MODE (decl
);
1595 enum machine_mode mode
= promote_function_mode (type
, old_mode
,
1596 &unsignedp
, funtype
, 1);
1598 if (mode
!= old_mode
)
1599 val
= convert_modes (mode
, old_mode
, val
, unsignedp
);
1601 if (GET_CODE (return_reg
) == PARALLEL
)
1602 emit_group_load (return_reg
, val
, type
, int_size_in_bytes (type
));
1604 emit_move_insn (return_reg
, val
);
1607 expand_null_return_1 ();
1610 /* Output a return with no value. */
1613 expand_null_return_1 (void)
1615 clear_pending_stack_adjust ();
1616 do_pending_stack_adjust ();
1617 emit_jump (return_label
);
1620 /* Generate RTL to evaluate the expression RETVAL and return it
1621 from the current function. */
1624 expand_return (tree retval
)
1630 /* If function wants no value, give it none. */
1631 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl
))) == VOID_TYPE
)
1633 expand_normal (retval
);
1634 expand_null_return ();
1638 if (retval
== error_mark_node
)
1640 /* Treat this like a return of no value from a function that
1642 expand_null_return ();
1645 else if ((TREE_CODE (retval
) == MODIFY_EXPR
1646 || TREE_CODE (retval
) == INIT_EXPR
)
1647 && TREE_CODE (TREE_OPERAND (retval
, 0)) == RESULT_DECL
)
1648 retval_rhs
= TREE_OPERAND (retval
, 1);
1650 retval_rhs
= retval
;
1652 result_rtl
= DECL_RTL (DECL_RESULT (current_function_decl
));
1654 /* If we are returning the RESULT_DECL, then the value has already
1655 been stored into it, so we don't have to do anything special. */
1656 if (TREE_CODE (retval_rhs
) == RESULT_DECL
)
1657 expand_value_return (result_rtl
);
1659 /* If the result is an aggregate that is being returned in one (or more)
1660 registers, load the registers here. The compiler currently can't handle
1661 copying a BLKmode value into registers. We could put this code in a
1662 more general area (for use by everyone instead of just function
1663 call/return), but until this feature is generally usable it is kept here
1664 (and in expand_call). */
1666 else if (retval_rhs
!= 0
1667 && TYPE_MODE (TREE_TYPE (retval_rhs
)) == BLKmode
1668 && REG_P (result_rtl
))
1671 unsigned HOST_WIDE_INT bitpos
, xbitpos
;
1672 unsigned HOST_WIDE_INT padding_correction
= 0;
1673 unsigned HOST_WIDE_INT bytes
1674 = int_size_in_bytes (TREE_TYPE (retval_rhs
));
1675 int n_regs
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
1676 unsigned int bitsize
1677 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs
)), BITS_PER_WORD
);
1678 rtx
*result_pseudos
= XALLOCAVEC (rtx
, n_regs
);
1679 rtx result_reg
, src
= NULL_RTX
, dst
= NULL_RTX
;
1680 rtx result_val
= expand_normal (retval_rhs
);
1681 enum machine_mode tmpmode
, result_reg_mode
;
1685 expand_null_return ();
1689 /* If the structure doesn't take up a whole number of words, see
1690 whether the register value should be padded on the left or on
1691 the right. Set PADDING_CORRECTION to the number of padding
1692 bits needed on the left side.
1694 In most ABIs, the structure will be returned at the least end of
1695 the register, which translates to right padding on little-endian
1696 targets and left padding on big-endian targets. The opposite
1697 holds if the structure is returned at the most significant
1698 end of the register. */
1699 if (bytes
% UNITS_PER_WORD
!= 0
1700 && (targetm
.calls
.return_in_msb (TREE_TYPE (retval_rhs
))
1702 : BYTES_BIG_ENDIAN
))
1703 padding_correction
= (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
)
1706 /* Copy the structure BITSIZE bits at a time. */
1707 for (bitpos
= 0, xbitpos
= padding_correction
;
1708 bitpos
< bytes
* BITS_PER_UNIT
;
1709 bitpos
+= bitsize
, xbitpos
+= bitsize
)
1711 /* We need a new destination pseudo each time xbitpos is
1712 on a word boundary and when xbitpos == padding_correction
1713 (the first time through). */
1714 if (xbitpos
% BITS_PER_WORD
== 0
1715 || xbitpos
== padding_correction
)
1717 /* Generate an appropriate register. */
1718 dst
= gen_reg_rtx (word_mode
);
1719 result_pseudos
[xbitpos
/ BITS_PER_WORD
] = dst
;
1721 /* Clear the destination before we move anything into it. */
1722 emit_move_insn (dst
, CONST0_RTX (GET_MODE (dst
)));
1725 /* We need a new source operand each time bitpos is on a word
1727 if (bitpos
% BITS_PER_WORD
== 0)
1728 src
= operand_subword_force (result_val
,
1729 bitpos
/ BITS_PER_WORD
,
1732 /* Use bitpos for the source extraction (left justified) and
1733 xbitpos for the destination store (right justified). */
1734 store_bit_field (dst
, bitsize
, xbitpos
% BITS_PER_WORD
, word_mode
,
1735 extract_bit_field (src
, bitsize
,
1736 bitpos
% BITS_PER_WORD
, 1,
1737 NULL_RTX
, word_mode
, word_mode
));
1740 tmpmode
= GET_MODE (result_rtl
);
1741 if (tmpmode
== BLKmode
)
1743 /* Find the smallest integer mode large enough to hold the
1744 entire structure and use that mode instead of BLKmode
1745 on the USE insn for the return register. */
1746 for (tmpmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1747 tmpmode
!= VOIDmode
;
1748 tmpmode
= GET_MODE_WIDER_MODE (tmpmode
))
1749 /* Have we found a large enough mode? */
1750 if (GET_MODE_SIZE (tmpmode
) >= bytes
)
1753 /* A suitable mode should have been found. */
1754 gcc_assert (tmpmode
!= VOIDmode
);
1756 PUT_MODE (result_rtl
, tmpmode
);
1759 if (GET_MODE_SIZE (tmpmode
) < GET_MODE_SIZE (word_mode
))
1760 result_reg_mode
= word_mode
;
1762 result_reg_mode
= tmpmode
;
1763 result_reg
= gen_reg_rtx (result_reg_mode
);
1765 for (i
= 0; i
< n_regs
; i
++)
1766 emit_move_insn (operand_subword (result_reg
, i
, 0, result_reg_mode
),
1769 if (tmpmode
!= result_reg_mode
)
1770 result_reg
= gen_lowpart (tmpmode
, result_reg
);
1772 expand_value_return (result_reg
);
1774 else if (retval_rhs
!= 0
1775 && !VOID_TYPE_P (TREE_TYPE (retval_rhs
))
1776 && (REG_P (result_rtl
)
1777 || (GET_CODE (result_rtl
) == PARALLEL
)))
1779 /* Calculate the return value into a temporary (usually a pseudo
1781 tree ot
= TREE_TYPE (DECL_RESULT (current_function_decl
));
1782 tree nt
= build_qualified_type (ot
, TYPE_QUALS (ot
) | TYPE_QUAL_CONST
);
1784 val
= assign_temp (nt
, 0, 0, 1);
1785 val
= expand_expr (retval_rhs
, val
, GET_MODE (val
), EXPAND_NORMAL
);
1786 val
= force_not_mem (val
);
1787 /* Return the calculated value. */
1788 expand_value_return (val
);
1792 /* No hard reg used; calculate value into hard return reg. */
1793 expand_expr (retval
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
1794 expand_value_return (result_rtl
);
1798 /* Emit code to restore vital registers at the beginning of a nonlocal goto
1801 expand_nl_goto_receiver (void)
1805 /* Clobber the FP when we get here, so we have to make sure it's
1806 marked as used by this function. */
1807 emit_use (hard_frame_pointer_rtx
);
1809 /* Mark the static chain as clobbered here so life information
1810 doesn't get messed up for it. */
1811 chain
= targetm
.calls
.static_chain (current_function_decl
, true);
1812 if (chain
&& REG_P (chain
))
1813 emit_clobber (chain
);
1815 #ifdef HAVE_nonlocal_goto
1816 if (! HAVE_nonlocal_goto
)
1818 /* First adjust our frame pointer to its actual value. It was
1819 previously set to the start of the virtual area corresponding to
1820 the stacked variables when we branched here and now needs to be
1821 adjusted to the actual hardware fp value.
1823 Assignments are to virtual registers are converted by
1824 instantiate_virtual_regs into the corresponding assignment
1825 to the underlying register (fp in this case) that makes
1826 the original assignment true.
1827 So the following insn will actually be
1828 decrementing fp by STARTING_FRAME_OFFSET. */
1829 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
1831 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1832 if (fixed_regs
[ARG_POINTER_REGNUM
])
1834 #ifdef ELIMINABLE_REGS
1835 /* If the argument pointer can be eliminated in favor of the
1836 frame pointer, we don't need to restore it. We assume here
1837 that if such an elimination is present, it can always be used.
1838 This is the case on all known machines; if we don't make this
1839 assumption, we do unnecessary saving on many machines. */
1840 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
1843 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
1844 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
1845 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
1848 if (i
== ARRAY_SIZE (elim_regs
))
1851 /* Now restore our arg pointer from the address at which it
1852 was saved in our stack frame. */
1853 emit_move_insn (crtl
->args
.internal_arg_pointer
,
1854 copy_to_reg (get_arg_pointer_save_area ()));
1859 #ifdef HAVE_nonlocal_goto_receiver
1860 if (HAVE_nonlocal_goto_receiver
)
1861 emit_insn (gen_nonlocal_goto_receiver ());
1864 /* We must not allow the code we just generated to be reordered by
1865 scheduling. Specifically, the update of the frame pointer must
1866 happen immediately, not later. */
1867 emit_insn (gen_blockage ());
1870 /* Generate RTL for the automatic variable declaration DECL.
1871 (Other kinds of declarations are simply ignored if seen here.) */
1874 expand_decl (tree decl
)
1878 type
= TREE_TYPE (decl
);
1880 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
1881 type in case this node is used in a reference. */
1882 if (TREE_CODE (decl
) == CONST_DECL
)
1884 DECL_MODE (decl
) = TYPE_MODE (type
);
1885 DECL_ALIGN (decl
) = TYPE_ALIGN (type
);
1886 DECL_SIZE (decl
) = TYPE_SIZE (type
);
1887 DECL_SIZE_UNIT (decl
) = TYPE_SIZE_UNIT (type
);
1891 /* Otherwise, only automatic variables need any expansion done. Static and
1892 external variables, and external functions, will be handled by
1893 `assemble_variable' (called from finish_decl). TYPE_DECL requires
1894 nothing. PARM_DECLs are handled in `assign_parms'. */
1895 if (TREE_CODE (decl
) != VAR_DECL
)
1898 if (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
))
1901 /* Create the RTL representation for the variable. */
1903 if (type
== error_mark_node
)
1904 SET_DECL_RTL (decl
, gen_rtx_MEM (BLKmode
, const0_rtx
));
1906 else if (DECL_SIZE (decl
) == 0)
1908 /* Variable with incomplete type. */
1910 if (DECL_INITIAL (decl
) == 0)
1911 /* Error message was already done; now avoid a crash. */
1912 x
= gen_rtx_MEM (BLKmode
, const0_rtx
);
1914 /* An initializer is going to decide the size of this array.
1915 Until we know the size, represent its address with a reg. */
1916 x
= gen_rtx_MEM (BLKmode
, gen_reg_rtx (Pmode
));
1918 set_mem_attributes (x
, decl
, 1);
1919 SET_DECL_RTL (decl
, x
);
1921 else if (use_register_for_decl (decl
))
1923 /* Automatic variable that can go in a register. */
1924 enum machine_mode reg_mode
= promote_decl_mode (decl
, NULL
);
1926 SET_DECL_RTL (decl
, gen_reg_rtx (reg_mode
));
1928 /* Note if the object is a user variable. */
1929 if (!DECL_ARTIFICIAL (decl
))
1930 mark_user_reg (DECL_RTL (decl
));
1932 if (POINTER_TYPE_P (type
))
1933 mark_reg_pointer (DECL_RTL (decl
),
1934 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl
))));
1943 /* Variable-sized decls are dealt with in the gimplifier. */
1944 gcc_assert (TREE_CODE (DECL_SIZE_UNIT (decl
)) == INTEGER_CST
);
1946 /* If we previously made RTL for this decl, it must be an array
1947 whose size was determined by the initializer.
1948 The old address was a register; set that register now
1949 to the proper address. */
1950 if (DECL_RTL_SET_P (decl
))
1952 gcc_assert (MEM_P (DECL_RTL (decl
)));
1953 gcc_assert (REG_P (XEXP (DECL_RTL (decl
), 0)));
1954 oldaddr
= XEXP (DECL_RTL (decl
), 0);
1957 /* Set alignment we actually gave this decl. */
1958 DECL_ALIGN (decl
) = (DECL_MODE (decl
) == BLKmode
? BIGGEST_ALIGNMENT
1959 : GET_MODE_BITSIZE (DECL_MODE (decl
)));
1960 DECL_USER_ALIGN (decl
) = 0;
1962 x
= assign_temp (decl
, 1, 1, 1);
1963 set_mem_attributes (x
, decl
, 1);
1964 SET_DECL_RTL (decl
, x
);
1968 addr
= force_operand (XEXP (DECL_RTL (decl
), 0), oldaddr
);
1969 if (addr
!= oldaddr
)
1970 emit_move_insn (oldaddr
, addr
);
1975 /* Emit code to save the current value of stack. */
1977 expand_stack_save (void)
1981 do_pending_stack_adjust ();
1982 emit_stack_save (SAVE_BLOCK
, &ret
, NULL_RTX
);
1986 /* Emit code to restore the current value of stack. */
1988 expand_stack_restore (tree var
)
1990 rtx sa
= expand_normal (var
);
1992 sa
= convert_memory_address (Pmode
, sa
);
1993 emit_stack_restore (SAVE_BLOCK
, sa
, NULL_RTX
);
1996 /* Do the insertion of a case label into case_list. The labels are
1997 fed to us in descending order from the sorted vector of case labels used
1998 in the tree part of the middle end. So the list we construct is
1999 sorted in ascending order. The bounds on the case range, LOW and HIGH,
2000 are converted to case's index type TYPE. */
2002 static struct case_node
*
2003 add_case_node (struct case_node
*head
, tree type
, tree low
, tree high
,
2004 tree label
, alloc_pool case_node_pool
)
2006 tree min_value
, max_value
;
2007 struct case_node
*r
;
2009 gcc_assert (TREE_CODE (low
) == INTEGER_CST
);
2010 gcc_assert (!high
|| TREE_CODE (high
) == INTEGER_CST
);
2012 min_value
= TYPE_MIN_VALUE (type
);
2013 max_value
= TYPE_MAX_VALUE (type
);
2015 /* If there's no HIGH value, then this is not a case range; it's
2016 just a simple case label. But that's just a degenerate case
2018 If the bounds are equal, turn this into the one-value case. */
2019 if (!high
|| tree_int_cst_equal (low
, high
))
2021 /* If the simple case value is unreachable, ignore it. */
2022 if ((TREE_CODE (min_value
) == INTEGER_CST
2023 && tree_int_cst_compare (low
, min_value
) < 0)
2024 || (TREE_CODE (max_value
) == INTEGER_CST
2025 && tree_int_cst_compare (low
, max_value
) > 0))
2027 low
= fold_convert (type
, low
);
2032 /* If the entire case range is unreachable, ignore it. */
2033 if ((TREE_CODE (min_value
) == INTEGER_CST
2034 && tree_int_cst_compare (high
, min_value
) < 0)
2035 || (TREE_CODE (max_value
) == INTEGER_CST
2036 && tree_int_cst_compare (low
, max_value
) > 0))
2039 /* If the lower bound is less than the index type's minimum
2040 value, truncate the range bounds. */
2041 if (TREE_CODE (min_value
) == INTEGER_CST
2042 && tree_int_cst_compare (low
, min_value
) < 0)
2044 low
= fold_convert (type
, low
);
2046 /* If the upper bound is greater than the index type's maximum
2047 value, truncate the range bounds. */
2048 if (TREE_CODE (max_value
) == INTEGER_CST
2049 && tree_int_cst_compare (high
, max_value
) > 0)
2051 high
= fold_convert (type
, high
);
2055 /* Add this label to the chain. Make sure to drop overflow flags. */
2056 r
= (struct case_node
*) pool_alloc (case_node_pool
);
2057 r
->low
= build_int_cst_wide (TREE_TYPE (low
), TREE_INT_CST_LOW (low
),
2058 TREE_INT_CST_HIGH (low
));
2059 r
->high
= build_int_cst_wide (TREE_TYPE (high
), TREE_INT_CST_LOW (high
),
2060 TREE_INT_CST_HIGH (high
));
2061 r
->code_label
= label
;
2062 r
->parent
= r
->left
= NULL
;
2067 /* Maximum number of case bit tests. */
2068 #define MAX_CASE_BIT_TESTS 3
2070 /* By default, enable case bit tests on targets with ashlsi3. */
2071 #ifndef CASE_USE_BIT_TESTS
2072 #define CASE_USE_BIT_TESTS (optab_handler (ashl_optab, word_mode)->insn_code \
2073 != CODE_FOR_nothing)
2077 /* A case_bit_test represents a set of case nodes that may be
2078 selected from using a bit-wise comparison. HI and LO hold
2079 the integer to be tested against, LABEL contains the label
2080 to jump to upon success and BITS counts the number of case
2081 nodes handled by this test, typically the number of bits
2084 struct case_bit_test
2092 /* Determine whether "1 << x" is relatively cheap in word_mode. */
2095 bool lshift_cheap_p (void)
2097 static bool init
= false;
2098 static bool cheap
= true;
2102 rtx reg
= gen_rtx_REG (word_mode
, 10000);
2103 int cost
= rtx_cost (gen_rtx_ASHIFT (word_mode
, const1_rtx
, reg
), SET
,
2104 optimize_insn_for_speed_p ());
2105 cheap
= cost
< COSTS_N_INSNS (3);
2112 /* Comparison function for qsort to order bit tests by decreasing
2113 number of case nodes, i.e. the node with the most cases gets
2117 case_bit_test_cmp (const void *p1
, const void *p2
)
2119 const struct case_bit_test
*const d1
= (const struct case_bit_test
*) p1
;
2120 const struct case_bit_test
*const d2
= (const struct case_bit_test
*) p2
;
2122 if (d2
->bits
!= d1
->bits
)
2123 return d2
->bits
- d1
->bits
;
2125 /* Stabilize the sort. */
2126 return CODE_LABEL_NUMBER (d2
->label
) - CODE_LABEL_NUMBER (d1
->label
);
2129 /* Expand a switch statement by a short sequence of bit-wise
2130 comparisons. "switch(x)" is effectively converted into
2131 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
2134 INDEX_EXPR is the value being switched on, which is of
2135 type INDEX_TYPE. MINVAL is the lowest case value of in
2136 the case nodes, of INDEX_TYPE type, and RANGE is highest
2137 value minus MINVAL, also of type INDEX_TYPE. NODES is
2138 the set of case nodes, and DEFAULT_LABEL is the label to
2139 branch to should none of the cases match.
2141 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
2145 emit_case_bit_tests (tree index_type
, tree index_expr
, tree minval
,
2146 tree range
, case_node_ptr nodes
, rtx default_label
)
2148 struct case_bit_test test
[MAX_CASE_BIT_TESTS
];
2149 enum machine_mode mode
;
2150 rtx expr
, index
, label
;
2151 unsigned int i
,j
,lo
,hi
;
2152 struct case_node
*n
;
2156 for (n
= nodes
; n
; n
= n
->right
)
2158 label
= label_rtx (n
->code_label
);
2159 for (i
= 0; i
< count
; i
++)
2160 if (label
== test
[i
].label
)
2165 gcc_assert (count
< MAX_CASE_BIT_TESTS
);
2168 test
[i
].label
= label
;
2175 lo
= tree_low_cst (fold_build2 (MINUS_EXPR
, index_type
,
2176 n
->low
, minval
), 1);
2177 hi
= tree_low_cst (fold_build2 (MINUS_EXPR
, index_type
,
2178 n
->high
, minval
), 1);
2179 for (j
= lo
; j
<= hi
; j
++)
2180 if (j
>= HOST_BITS_PER_WIDE_INT
)
2181 test
[i
].hi
|= (HOST_WIDE_INT
) 1 << (j
- HOST_BITS_PER_INT
);
2183 test
[i
].lo
|= (HOST_WIDE_INT
) 1 << j
;
2186 qsort (test
, count
, sizeof(*test
), case_bit_test_cmp
);
2188 index_expr
= fold_build2 (MINUS_EXPR
, index_type
,
2189 fold_convert (index_type
, index_expr
),
2190 fold_convert (index_type
, minval
));
2191 index
= expand_normal (index_expr
);
2192 do_pending_stack_adjust ();
2194 mode
= TYPE_MODE (index_type
);
2195 expr
= expand_normal (range
);
2197 emit_cmp_and_jump_insns (index
, expr
, GTU
, NULL_RTX
, mode
, 1,
2200 index
= convert_to_mode (word_mode
, index
, 0);
2201 index
= expand_binop (word_mode
, ashl_optab
, const1_rtx
,
2202 index
, NULL_RTX
, 1, OPTAB_WIDEN
);
2204 for (i
= 0; i
< count
; i
++)
2206 expr
= immed_double_const (test
[i
].lo
, test
[i
].hi
, word_mode
);
2207 expr
= expand_binop (word_mode
, and_optab
, index
, expr
,
2208 NULL_RTX
, 1, OPTAB_WIDEN
);
2209 emit_cmp_and_jump_insns (expr
, const0_rtx
, NE
, NULL_RTX
,
2210 word_mode
, 1, test
[i
].label
);
2214 emit_jump (default_label
);
2218 #define HAVE_casesi 0
2221 #ifndef HAVE_tablejump
2222 #define HAVE_tablejump 0
2225 /* Terminate a case (Pascal/Ada) or switch (C) statement
2226 in which ORIG_INDEX is the expression to be tested.
2227 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
2228 type as given in the source before any compiler conversions.
2229 Generate the code to test it and jump to the right place. */
2232 expand_case (gimple stmt
)
2234 tree minval
= NULL_TREE
, maxval
= NULL_TREE
, range
= NULL_TREE
;
2235 rtx default_label
= 0;
2236 struct case_node
*n
;
2237 unsigned int count
, uniq
;
2243 rtx before_case
, end
, lab
;
2245 tree index_expr
= gimple_switch_index (stmt
);
2246 tree index_type
= TREE_TYPE (index_expr
);
2247 int unsignedp
= TYPE_UNSIGNED (index_type
);
2249 /* The insn after which the case dispatch should finally
2250 be emitted. Zero for a dummy. */
2253 /* A list of case labels; it is first built as a list and it may then
2254 be rearranged into a nearly balanced binary tree. */
2255 struct case_node
*case_list
= 0;
2257 /* Label to jump to if no case matches. */
2258 tree default_label_decl
= NULL_TREE
;
2260 alloc_pool case_node_pool
= create_alloc_pool ("struct case_node pool",
2261 sizeof (struct case_node
),
2264 do_pending_stack_adjust ();
2266 /* An ERROR_MARK occurs for various reasons including invalid data type. */
2267 if (index_type
!= error_mark_node
)
2270 bitmap label_bitmap
;
2273 /* cleanup_tree_cfg removes all SWITCH_EXPR with their index
2274 expressions being INTEGER_CST. */
2275 gcc_assert (TREE_CODE (index_expr
) != INTEGER_CST
);
2277 /* The default case, if ever taken, is the first element. */
2278 elt
= gimple_switch_label (stmt
, 0);
2279 if (!CASE_LOW (elt
) && !CASE_HIGH (elt
))
2281 default_label_decl
= CASE_LABEL (elt
);
2285 for (i
= gimple_switch_num_labels (stmt
) - 1; i
>= stopi
; --i
)
2288 elt
= gimple_switch_label (stmt
, i
);
2290 low
= CASE_LOW (elt
);
2292 high
= CASE_HIGH (elt
);
2294 /* Discard empty ranges. */
2295 if (high
&& tree_int_cst_lt (high
, low
))
2298 case_list
= add_case_node (case_list
, index_type
, low
, high
,
2299 CASE_LABEL (elt
), case_node_pool
);
2303 before_case
= start
= get_last_insn ();
2304 if (default_label_decl
)
2305 default_label
= label_rtx (default_label_decl
);
2307 /* Get upper and lower bounds of case values. */
2311 label_bitmap
= BITMAP_ALLOC (NULL
);
2312 for (n
= case_list
; n
; n
= n
->right
)
2314 /* Count the elements and track the largest and smallest
2315 of them (treating them as signed even if they are not). */
2323 if (tree_int_cst_lt (n
->low
, minval
))
2325 if (tree_int_cst_lt (maxval
, n
->high
))
2328 /* A range counts double, since it requires two compares. */
2329 if (! tree_int_cst_equal (n
->low
, n
->high
))
2332 /* If we have not seen this label yet, then increase the
2333 number of unique case node targets seen. */
2334 lab
= label_rtx (n
->code_label
);
2335 if (!bitmap_bit_p (label_bitmap
, CODE_LABEL_NUMBER (lab
)))
2337 bitmap_set_bit (label_bitmap
, CODE_LABEL_NUMBER (lab
));
2342 BITMAP_FREE (label_bitmap
);
2344 /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
2345 destination, such as one with a default case only. However,
2346 it doesn't remove cases that are out of range for the switch
2347 type, so we may still get a zero here. */
2351 emit_jump (default_label
);
2352 free_alloc_pool (case_node_pool
);
2356 /* Compute span of values. */
2357 range
= fold_build2 (MINUS_EXPR
, index_type
, maxval
, minval
);
2359 /* Try implementing this switch statement by a short sequence of
2360 bit-wise comparisons. However, we let the binary-tree case
2361 below handle constant index expressions. */
2362 if (CASE_USE_BIT_TESTS
2363 && ! TREE_CONSTANT (index_expr
)
2364 && compare_tree_int (range
, GET_MODE_BITSIZE (word_mode
)) < 0
2365 && compare_tree_int (range
, 0) > 0
2366 && lshift_cheap_p ()
2367 && ((uniq
== 1 && count
>= 3)
2368 || (uniq
== 2 && count
>= 5)
2369 || (uniq
== 3 && count
>= 6)))
2371 /* Optimize the case where all the case values fit in a
2372 word without having to subtract MINVAL. In this case,
2373 we can optimize away the subtraction. */
2374 if (compare_tree_int (minval
, 0) > 0
2375 && compare_tree_int (maxval
, GET_MODE_BITSIZE (word_mode
)) < 0)
2377 minval
= build_int_cst (index_type
, 0);
2380 emit_case_bit_tests (index_type
, index_expr
, minval
, range
,
2381 case_list
, default_label
);
2384 /* If range of values is much bigger than number of values,
2385 make a sequence of conditional branches instead of a dispatch.
2386 If the switch-index is a constant, do it this way
2387 because we can optimize it. */
2389 else if (count
< targetm
.case_values_threshold ()
2390 || compare_tree_int (range
,
2391 (optimize_insn_for_size_p () ? 3 : 10) * count
) > 0
2392 /* RANGE may be signed, and really large ranges will show up
2393 as negative numbers. */
2394 || compare_tree_int (range
, 0) < 0
2395 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
2398 || !flag_jump_tables
2399 || TREE_CONSTANT (index_expr
)
2400 /* If neither casesi or tablejump is available, we can
2401 only go this way. */
2402 || (!HAVE_casesi
&& !HAVE_tablejump
))
2404 index
= expand_normal (index_expr
);
2406 /* If the index is a short or char that we do not have
2407 an insn to handle comparisons directly, convert it to
2408 a full integer now, rather than letting each comparison
2409 generate the conversion. */
2411 if (GET_MODE_CLASS (GET_MODE (index
)) == MODE_INT
2412 && ! have_insn_for (COMPARE
, GET_MODE (index
)))
2414 enum machine_mode wider_mode
;
2415 for (wider_mode
= GET_MODE (index
); wider_mode
!= VOIDmode
;
2416 wider_mode
= GET_MODE_WIDER_MODE (wider_mode
))
2417 if (have_insn_for (COMPARE
, wider_mode
))
2419 index
= convert_to_mode (wider_mode
, index
, unsignedp
);
2424 do_pending_stack_adjust ();
2427 index
= copy_to_reg (index
);
2429 /* We generate a binary decision tree to select the
2430 appropriate target code. This is done as follows:
2432 The list of cases is rearranged into a binary tree,
2433 nearly optimal assuming equal probability for each case.
2435 The tree is transformed into RTL, eliminating
2436 redundant test conditions at the same time.
2438 If program flow could reach the end of the
2439 decision tree an unconditional jump to the
2440 default code is emitted. */
2442 use_cost_table
= estimate_case_costs (case_list
);
2443 balance_case_nodes (&case_list
, NULL
);
2444 emit_case_nodes (index
, case_list
, default_label
, index_type
);
2446 emit_jump (default_label
);
2450 rtx fallback_label
= label_rtx (case_list
->code_label
);
2451 table_label
= gen_label_rtx ();
2452 if (! try_casesi (index_type
, index_expr
, minval
, range
,
2453 table_label
, default_label
, fallback_label
))
2457 /* Index jumptables from zero for suitable values of
2458 minval to avoid a subtraction. */
2459 if (optimize_insn_for_speed_p ()
2460 && compare_tree_int (minval
, 0) > 0
2461 && compare_tree_int (minval
, 3) < 0)
2463 minval
= build_int_cst (index_type
, 0);
2467 ok
= try_tablejump (index_type
, index_expr
, minval
, range
,
2468 table_label
, default_label
);
2472 /* Get table of labels to jump to, in order of case index. */
2474 ncases
= tree_low_cst (range
, 0) + 1;
2475 labelvec
= XALLOCAVEC (rtx
, ncases
);
2476 memset (labelvec
, 0, ncases
* sizeof (rtx
));
2478 for (n
= case_list
; n
; n
= n
->right
)
2480 /* Compute the low and high bounds relative to the minimum
2481 value since that should fit in a HOST_WIDE_INT while the
2482 actual values may not. */
2484 = tree_low_cst (fold_build2 (MINUS_EXPR
, index_type
,
2485 n
->low
, minval
), 1);
2486 HOST_WIDE_INT i_high
2487 = tree_low_cst (fold_build2 (MINUS_EXPR
, index_type
,
2488 n
->high
, minval
), 1);
2491 for (i
= i_low
; i
<= i_high
; i
++)
2493 = gen_rtx_LABEL_REF (Pmode
, label_rtx (n
->code_label
));
2496 /* Fill in the gaps with the default. We may have gaps at
2497 the beginning if we tried to avoid the minval subtraction,
2498 so substitute some label even if the default label was
2499 deemed unreachable. */
2501 default_label
= fallback_label
;
2502 for (i
= 0; i
< ncases
; i
++)
2503 if (labelvec
[i
] == 0)
2504 labelvec
[i
] = gen_rtx_LABEL_REF (Pmode
, default_label
);
2506 /* Output the table. */
2507 emit_label (table_label
);
2509 if (CASE_VECTOR_PC_RELATIVE
|| flag_pic
)
2510 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE
,
2511 gen_rtx_LABEL_REF (Pmode
, table_label
),
2512 gen_rtvec_v (ncases
, labelvec
),
2513 const0_rtx
, const0_rtx
));
2515 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE
,
2516 gen_rtvec_v (ncases
, labelvec
)));
2518 /* Record no drop-through after the table. */
2522 before_case
= NEXT_INSN (before_case
);
2523 end
= get_last_insn ();
2524 reorder_insns (before_case
, end
, start
);
2528 free_alloc_pool (case_node_pool
);
2531 /* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE. */
2534 do_jump_if_equal (enum machine_mode mode
, rtx op0
, rtx op1
, rtx label
,
2537 do_compare_rtx_and_jump (op0
, op1
, EQ
, unsignedp
, mode
,
2538 NULL_RTX
, NULL_RTX
, label
, -1);
2541 /* Not all case values are encountered equally. This function
2542 uses a heuristic to weight case labels, in cases where that
2543 looks like a reasonable thing to do.
2545 Right now, all we try to guess is text, and we establish the
2548 chars above space: 16
2557 If we find any cases in the switch that are not either -1 or in the range
2558 of valid ASCII characters, or are control characters other than those
2559 commonly used with "\", don't treat this switch scanning text.
2561 Return 1 if these nodes are suitable for cost estimation, otherwise
2565 estimate_case_costs (case_node_ptr node
)
2567 tree min_ascii
= integer_minus_one_node
;
2568 tree max_ascii
= build_int_cst (TREE_TYPE (node
->high
), 127);
2572 /* If we haven't already made the cost table, make it now. Note that the
2573 lower bound of the table is -1, not zero. */
2575 if (! cost_table_initialized
)
2577 cost_table_initialized
= 1;
2579 for (i
= 0; i
< 128; i
++)
2582 COST_TABLE (i
) = 16;
2583 else if (ISPUNCT (i
))
2585 else if (ISCNTRL (i
))
2586 COST_TABLE (i
) = -1;
2589 COST_TABLE (' ') = 8;
2590 COST_TABLE ('\t') = 4;
2591 COST_TABLE ('\0') = 4;
2592 COST_TABLE ('\n') = 2;
2593 COST_TABLE ('\f') = 1;
2594 COST_TABLE ('\v') = 1;
2595 COST_TABLE ('\b') = 1;
2598 /* See if all the case expressions look like text. It is text if the
2599 constant is >= -1 and the highest constant is <= 127. Do all comparisons
2600 as signed arithmetic since we don't want to ever access cost_table with a
2601 value less than -1. Also check that none of the constants in a range
2602 are strange control characters. */
2604 for (n
= node
; n
; n
= n
->right
)
2606 if (tree_int_cst_lt (n
->low
, min_ascii
)
2607 || tree_int_cst_lt (max_ascii
, n
->high
))
2610 for (i
= (HOST_WIDE_INT
) TREE_INT_CST_LOW (n
->low
);
2611 i
<= (HOST_WIDE_INT
) TREE_INT_CST_LOW (n
->high
); i
++)
2612 if (COST_TABLE (i
) < 0)
2616 /* All interesting values are within the range of interesting
2617 ASCII characters. */
2621 /* Take an ordered list of case nodes
2622 and transform them into a near optimal binary tree,
2623 on the assumption that any target code selection value is as
2624 likely as any other.
2626 The transformation is performed by splitting the ordered
2627 list into two equal sections plus a pivot. The parts are
2628 then attached to the pivot as left and right branches. Each
2629 branch is then transformed recursively. */
2632 balance_case_nodes (case_node_ptr
*head
, case_node_ptr parent
)
2645 /* Count the number of entries on branch. Also count the ranges. */
2649 if (!tree_int_cst_equal (np
->low
, np
->high
))
2653 cost
+= COST_TABLE (TREE_INT_CST_LOW (np
->high
));
2657 cost
+= COST_TABLE (TREE_INT_CST_LOW (np
->low
));
2665 /* Split this list if it is long enough for that to help. */
2670 /* Find the place in the list that bisects the list's total cost,
2671 Here I gets half the total cost. */
2676 /* Skip nodes while their cost does not reach that amount. */
2677 if (!tree_int_cst_equal ((*npp
)->low
, (*npp
)->high
))
2678 i
-= COST_TABLE (TREE_INT_CST_LOW ((*npp
)->high
));
2679 i
-= COST_TABLE (TREE_INT_CST_LOW ((*npp
)->low
));
2682 npp
= &(*npp
)->right
;
2687 /* Leave this branch lopsided, but optimize left-hand
2688 side and fill in `parent' fields for right-hand side. */
2690 np
->parent
= parent
;
2691 balance_case_nodes (&np
->left
, np
);
2692 for (; np
->right
; np
= np
->right
)
2693 np
->right
->parent
= np
;
2697 /* If there are just three nodes, split at the middle one. */
2699 npp
= &(*npp
)->right
;
2702 /* Find the place in the list that bisects the list's total cost,
2703 where ranges count as 2.
2704 Here I gets half the total cost. */
2705 i
= (i
+ ranges
+ 1) / 2;
2708 /* Skip nodes while their cost does not reach that amount. */
2709 if (!tree_int_cst_equal ((*npp
)->low
, (*npp
)->high
))
2714 npp
= &(*npp
)->right
;
2719 np
->parent
= parent
;
2722 /* Optimize each of the two split parts. */
2723 balance_case_nodes (&np
->left
, np
);
2724 balance_case_nodes (&np
->right
, np
);
2728 /* Else leave this branch as one level,
2729 but fill in `parent' fields. */
2731 np
->parent
= parent
;
2732 for (; np
->right
; np
= np
->right
)
2733 np
->right
->parent
= np
;
2738 /* Search the parent sections of the case node tree
2739 to see if a test for the lower bound of NODE would be redundant.
2740 INDEX_TYPE is the type of the index expression.
2742 The instructions to generate the case decision tree are
2743 output in the same order as nodes are processed so it is
2744 known that if a parent node checks the range of the current
2745 node minus one that the current node is bounded at its lower
2746 span. Thus the test would be redundant. */
2749 node_has_low_bound (case_node_ptr node
, tree index_type
)
2752 case_node_ptr pnode
;
2754 /* If the lower bound of this node is the lowest value in the index type,
2755 we need not test it. */
2757 if (tree_int_cst_equal (node
->low
, TYPE_MIN_VALUE (index_type
)))
2760 /* If this node has a left branch, the value at the left must be less
2761 than that at this node, so it cannot be bounded at the bottom and
2762 we need not bother testing any further. */
2767 low_minus_one
= fold_build2 (MINUS_EXPR
, TREE_TYPE (node
->low
),
2769 build_int_cst (TREE_TYPE (node
->low
), 1));
2771 /* If the subtraction above overflowed, we can't verify anything.
2772 Otherwise, look for a parent that tests our value - 1. */
2774 if (! tree_int_cst_lt (low_minus_one
, node
->low
))
2777 for (pnode
= node
->parent
; pnode
; pnode
= pnode
->parent
)
2778 if (tree_int_cst_equal (low_minus_one
, pnode
->high
))
2784 /* Search the parent sections of the case node tree
2785 to see if a test for the upper bound of NODE would be redundant.
2786 INDEX_TYPE is the type of the index expression.
2788 The instructions to generate the case decision tree are
2789 output in the same order as nodes are processed so it is
2790 known that if a parent node checks the range of the current
2791 node plus one that the current node is bounded at its upper
2792 span. Thus the test would be redundant. */
2795 node_has_high_bound (case_node_ptr node
, tree index_type
)
2798 case_node_ptr pnode
;
2800 /* If there is no upper bound, obviously no test is needed. */
2802 if (TYPE_MAX_VALUE (index_type
) == NULL
)
2805 /* If the upper bound of this node is the highest value in the type
2806 of the index expression, we need not test against it. */
2808 if (tree_int_cst_equal (node
->high
, TYPE_MAX_VALUE (index_type
)))
2811 /* If this node has a right branch, the value at the right must be greater
2812 than that at this node, so it cannot be bounded at the top and
2813 we need not bother testing any further. */
2818 high_plus_one
= fold_build2 (PLUS_EXPR
, TREE_TYPE (node
->high
),
2820 build_int_cst (TREE_TYPE (node
->high
), 1));
2822 /* If the addition above overflowed, we can't verify anything.
2823 Otherwise, look for a parent that tests our value + 1. */
2825 if (! tree_int_cst_lt (node
->high
, high_plus_one
))
2828 for (pnode
= node
->parent
; pnode
; pnode
= pnode
->parent
)
2829 if (tree_int_cst_equal (high_plus_one
, pnode
->low
))
2835 /* Search the parent sections of the
2836 case node tree to see if both tests for the upper and lower
2837 bounds of NODE would be redundant. */
2840 node_is_bounded (case_node_ptr node
, tree index_type
)
2842 return (node_has_low_bound (node
, index_type
)
2843 && node_has_high_bound (node
, index_type
));
2846 /* Emit step-by-step code to select a case for the value of INDEX.
2847 The thus generated decision tree follows the form of the
2848 case-node binary tree NODE, whose nodes represent test conditions.
2849 INDEX_TYPE is the type of the index of the switch.
2851 Care is taken to prune redundant tests from the decision tree
2852 by detecting any boundary conditions already checked by
2853 emitted rtx. (See node_has_high_bound, node_has_low_bound
2854 and node_is_bounded, above.)
2856 Where the test conditions can be shown to be redundant we emit
2857 an unconditional jump to the target code. As a further
2858 optimization, the subordinates of a tree node are examined to
2859 check for bounded nodes. In this case conditional and/or
2860 unconditional jumps as a result of the boundary check for the
2861 current node are arranged to target the subordinates associated
2862 code for out of bound conditions on the current node.
2864 We can assume that when control reaches the code generated here,
2865 the index value has already been compared with the parents
2866 of this node, and determined to be on the same side of each parent
2867 as this node is. Thus, if this node tests for the value 51,
2868 and a parent tested for 52, we don't need to consider
2869 the possibility of a value greater than 51. If another parent
2870 tests for the value 50, then this node need not test anything. */
2873 emit_case_nodes (rtx index
, case_node_ptr node
, rtx default_label
,
2876 /* If INDEX has an unsigned type, we must make unsigned branches. */
2877 int unsignedp
= TYPE_UNSIGNED (index_type
);
2878 enum machine_mode mode
= GET_MODE (index
);
2879 enum machine_mode imode
= TYPE_MODE (index_type
);
2881 /* Handle indices detected as constant during RTL expansion. */
2882 if (mode
== VOIDmode
)
2885 /* See if our parents have already tested everything for us.
2886 If they have, emit an unconditional jump for this node. */
2887 if (node_is_bounded (node
, index_type
))
2888 emit_jump (label_rtx (node
->code_label
));
2890 else if (tree_int_cst_equal (node
->low
, node
->high
))
2892 /* Node is single valued. First see if the index expression matches
2893 this node and then check our children, if any. */
2895 do_jump_if_equal (mode
, index
,
2896 convert_modes (mode
, imode
,
2897 expand_normal (node
->low
),
2899 label_rtx (node
->code_label
), unsignedp
);
2901 if (node
->right
!= 0 && node
->left
!= 0)
2903 /* This node has children on both sides.
2904 Dispatch to one side or the other
2905 by comparing the index value with this node's value.
2906 If one subtree is bounded, check that one first,
2907 so we can avoid real branches in the tree. */
2909 if (node_is_bounded (node
->right
, index_type
))
2911 emit_cmp_and_jump_insns (index
,
2914 expand_normal (node
->high
),
2916 GT
, NULL_RTX
, mode
, unsignedp
,
2917 label_rtx (node
->right
->code_label
));
2918 emit_case_nodes (index
, node
->left
, default_label
, index_type
);
2921 else if (node_is_bounded (node
->left
, index_type
))
2923 emit_cmp_and_jump_insns (index
,
2926 expand_normal (node
->high
),
2928 LT
, NULL_RTX
, mode
, unsignedp
,
2929 label_rtx (node
->left
->code_label
));
2930 emit_case_nodes (index
, node
->right
, default_label
, index_type
);
2933 /* If both children are single-valued cases with no
2934 children, finish up all the work. This way, we can save
2935 one ordered comparison. */
2936 else if (tree_int_cst_equal (node
->right
->low
, node
->right
->high
)
2937 && node
->right
->left
== 0
2938 && node
->right
->right
== 0
2939 && tree_int_cst_equal (node
->left
->low
, node
->left
->high
)
2940 && node
->left
->left
== 0
2941 && node
->left
->right
== 0)
2943 /* Neither node is bounded. First distinguish the two sides;
2944 then emit the code for one side at a time. */
2946 /* See if the value matches what the right hand side
2948 do_jump_if_equal (mode
, index
,
2949 convert_modes (mode
, imode
,
2950 expand_normal (node
->right
->low
),
2952 label_rtx (node
->right
->code_label
),
2955 /* See if the value matches what the left hand side
2957 do_jump_if_equal (mode
, index
,
2958 convert_modes (mode
, imode
,
2959 expand_normal (node
->left
->low
),
2961 label_rtx (node
->left
->code_label
),
2967 /* Neither node is bounded. First distinguish the two sides;
2968 then emit the code for one side at a time. */
2971 = build_decl (CURR_INSN_LOCATION
,
2972 LABEL_DECL
, NULL_TREE
, NULL_TREE
);
2974 /* See if the value is on the right. */
2975 emit_cmp_and_jump_insns (index
,
2978 expand_normal (node
->high
),
2980 GT
, NULL_RTX
, mode
, unsignedp
,
2981 label_rtx (test_label
));
2983 /* Value must be on the left.
2984 Handle the left-hand subtree. */
2985 emit_case_nodes (index
, node
->left
, default_label
, index_type
);
2986 /* If left-hand subtree does nothing,
2989 emit_jump (default_label
);
2991 /* Code branches here for the right-hand subtree. */
2992 expand_label (test_label
);
2993 emit_case_nodes (index
, node
->right
, default_label
, index_type
);
2997 else if (node
->right
!= 0 && node
->left
== 0)
2999 /* Here we have a right child but no left so we issue a conditional
3000 branch to default and process the right child.
3002 Omit the conditional branch to default if the right child
3003 does not have any children and is single valued; it would
3004 cost too much space to save so little time. */
3006 if (node
->right
->right
|| node
->right
->left
3007 || !tree_int_cst_equal (node
->right
->low
, node
->right
->high
))
3009 if (!node_has_low_bound (node
, index_type
))
3011 emit_cmp_and_jump_insns (index
,
3014 expand_normal (node
->high
),
3016 LT
, NULL_RTX
, mode
, unsignedp
,
3020 emit_case_nodes (index
, node
->right
, default_label
, index_type
);
3023 /* We cannot process node->right normally
3024 since we haven't ruled out the numbers less than
3025 this node's value. So handle node->right explicitly. */
3026 do_jump_if_equal (mode
, index
,
3029 expand_normal (node
->right
->low
),
3031 label_rtx (node
->right
->code_label
), unsignedp
);
3034 else if (node
->right
== 0 && node
->left
!= 0)
3036 /* Just one subtree, on the left. */
3037 if (node
->left
->left
|| node
->left
->right
3038 || !tree_int_cst_equal (node
->left
->low
, node
->left
->high
))
3040 if (!node_has_high_bound (node
, index_type
))
3042 emit_cmp_and_jump_insns (index
,
3045 expand_normal (node
->high
),
3047 GT
, NULL_RTX
, mode
, unsignedp
,
3051 emit_case_nodes (index
, node
->left
, default_label
, index_type
);
3054 /* We cannot process node->left normally
3055 since we haven't ruled out the numbers less than
3056 this node's value. So handle node->left explicitly. */
3057 do_jump_if_equal (mode
, index
,
3060 expand_normal (node
->left
->low
),
3062 label_rtx (node
->left
->code_label
), unsignedp
);
3067 /* Node is a range. These cases are very similar to those for a single
3068 value, except that we do not start by testing whether this node
3069 is the one to branch to. */
3071 if (node
->right
!= 0 && node
->left
!= 0)
3073 /* Node has subtrees on both sides.
3074 If the right-hand subtree is bounded,
3075 test for it first, since we can go straight there.
3076 Otherwise, we need to make a branch in the control structure,
3077 then handle the two subtrees. */
3078 tree test_label
= 0;
3080 if (node_is_bounded (node
->right
, index_type
))
3081 /* Right hand node is fully bounded so we can eliminate any
3082 testing and branch directly to the target code. */
3083 emit_cmp_and_jump_insns (index
,
3086 expand_normal (node
->high
),
3088 GT
, NULL_RTX
, mode
, unsignedp
,
3089 label_rtx (node
->right
->code_label
));
3092 /* Right hand node requires testing.
3093 Branch to a label where we will handle it later. */
3095 test_label
= build_decl (CURR_INSN_LOCATION
,
3096 LABEL_DECL
, NULL_TREE
, NULL_TREE
);
3097 emit_cmp_and_jump_insns (index
,
3100 expand_normal (node
->high
),
3102 GT
, NULL_RTX
, mode
, unsignedp
,
3103 label_rtx (test_label
));
3106 /* Value belongs to this node or to the left-hand subtree. */
3108 emit_cmp_and_jump_insns (index
,
3111 expand_normal (node
->low
),
3113 GE
, NULL_RTX
, mode
, unsignedp
,
3114 label_rtx (node
->code_label
));
3116 /* Handle the left-hand subtree. */
3117 emit_case_nodes (index
, node
->left
, default_label
, index_type
);
3119 /* If right node had to be handled later, do that now. */
3123 /* If the left-hand subtree fell through,
3124 don't let it fall into the right-hand subtree. */
3126 emit_jump (default_label
);
3128 expand_label (test_label
);
3129 emit_case_nodes (index
, node
->right
, default_label
, index_type
);
3133 else if (node
->right
!= 0 && node
->left
== 0)
3135 /* Deal with values to the left of this node,
3136 if they are possible. */
3137 if (!node_has_low_bound (node
, index_type
))
3139 emit_cmp_and_jump_insns (index
,
3142 expand_normal (node
->low
),
3144 LT
, NULL_RTX
, mode
, unsignedp
,
3148 /* Value belongs to this node or to the right-hand subtree. */
3150 emit_cmp_and_jump_insns (index
,
3153 expand_normal (node
->high
),
3155 LE
, NULL_RTX
, mode
, unsignedp
,
3156 label_rtx (node
->code_label
));
3158 emit_case_nodes (index
, node
->right
, default_label
, index_type
);
3161 else if (node
->right
== 0 && node
->left
!= 0)
3163 /* Deal with values to the right of this node,
3164 if they are possible. */
3165 if (!node_has_high_bound (node
, index_type
))
3167 emit_cmp_and_jump_insns (index
,
3170 expand_normal (node
->high
),
3172 GT
, NULL_RTX
, mode
, unsignedp
,
3176 /* Value belongs to this node or to the left-hand subtree. */
3178 emit_cmp_and_jump_insns (index
,
3181 expand_normal (node
->low
),
3183 GE
, NULL_RTX
, mode
, unsignedp
,
3184 label_rtx (node
->code_label
));
3186 emit_case_nodes (index
, node
->left
, default_label
, index_type
);
3191 /* Node has no children so we check low and high bounds to remove
3192 redundant tests. Only one of the bounds can exist,
3193 since otherwise this node is bounded--a case tested already. */
3194 int high_bound
= node_has_high_bound (node
, index_type
);
3195 int low_bound
= node_has_low_bound (node
, index_type
);
3197 if (!high_bound
&& low_bound
)
3199 emit_cmp_and_jump_insns (index
,
3202 expand_normal (node
->high
),
3204 GT
, NULL_RTX
, mode
, unsignedp
,
3208 else if (!low_bound
&& high_bound
)
3210 emit_cmp_and_jump_insns (index
,
3213 expand_normal (node
->low
),
3215 LT
, NULL_RTX
, mode
, unsignedp
,
3218 else if (!low_bound
&& !high_bound
)
3220 /* Widen LOW and HIGH to the same width as INDEX. */
3221 tree type
= lang_hooks
.types
.type_for_mode (mode
, unsignedp
);
3222 tree low
= build1 (CONVERT_EXPR
, type
, node
->low
);
3223 tree high
= build1 (CONVERT_EXPR
, type
, node
->high
);
3224 rtx low_rtx
, new_index
, new_bound
;
3226 /* Instead of doing two branches, emit one unsigned branch for
3227 (index-low) > (high-low). */
3228 low_rtx
= expand_expr (low
, NULL_RTX
, mode
, EXPAND_NORMAL
);
3229 new_index
= expand_simple_binop (mode
, MINUS
, index
, low_rtx
,
3230 NULL_RTX
, unsignedp
,
3232 new_bound
= expand_expr (fold_build2 (MINUS_EXPR
, type
,
3234 NULL_RTX
, mode
, EXPAND_NORMAL
);
3236 emit_cmp_and_jump_insns (new_index
, new_bound
, GT
, NULL_RTX
,
3237 mode
, 1, default_label
);
3240 emit_jump (label_rtx (node
->code_label
));