1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4 2010, 2011, 2012 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 The functions whose names start with `expand_' are called by the
25 expander to generate RTL instructions for various kinds of constructs. */
29 #include "coretypes.h"
33 #include "hard-reg-set.h"
39 #include "insn-config.h"
44 #include "diagnostic-core.h"
47 #include "langhooks.h"
53 #include "alloc-pool.h"
54 #include "pretty-print.h"
60 /* Functions and data structures for expanding case statements. */
62 /* Case label structure, used to hold info on labels within case
63 statements. We handle "range" labels; for a single-value label
64 as in C, the high and low limits are the same.
66 We start with a vector of case nodes sorted in ascending order, and
67 the default label as the last element in the vector. Before expanding
68 to RTL, we transform this vector into a list linked via the RIGHT
69 fields in the case_node struct. Nodes with higher case values are
72 Switch statements can be output in three forms. A branch table is
73 used if there are more than a few labels and the labels are dense
74 within the range between the smallest and largest case value. If a
75 branch table is used, no further manipulations are done with the case
78 The alternative to the use of a branch table is to generate a series
79 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
80 and PARENT fields to hold a binary tree. Initially the tree is
81 totally unbalanced, with everything on the right. We balance the tree
82 with nodes on the left having lower case values than the parent
83 and nodes on the right having higher values. We then output the tree
86 For very small, suitable switch statements, we can generate a series
87 of simple bit test and branches instead. */
91 struct case_node
*left
; /* Left son in binary tree */
92 struct case_node
*right
; /* Right son in binary tree; also node chain */
93 struct case_node
*parent
; /* Parent of node in binary tree */
94 tree low
; /* Lowest index value for this label */
95 tree high
; /* Highest index value for this label */
96 tree code_label
; /* Label to jump to when node matches */
99 typedef struct case_node case_node
;
100 typedef struct case_node
*case_node_ptr
;
103 static int n_occurrences (int, const char *);
104 static bool tree_conflicts_with_clobbers_p (tree
, HARD_REG_SET
*);
105 static void expand_nl_goto_receiver (void);
106 static bool check_operand_nalternatives (tree
, tree
);
107 static bool check_unique_operand_names (tree
, tree
, tree
);
108 static char *resolve_operand_name_1 (char *, tree
, tree
, tree
);
109 static void expand_null_return_1 (void);
110 static void expand_value_return (rtx
);
111 static void balance_case_nodes (case_node_ptr
*, case_node_ptr
);
112 static int node_has_low_bound (case_node_ptr
, tree
);
113 static int node_has_high_bound (case_node_ptr
, tree
);
114 static int node_is_bounded (case_node_ptr
, tree
);
115 static void emit_case_nodes (rtx
, case_node_ptr
, rtx
, tree
);
116 static struct case_node
*add_case_node (struct case_node
*, tree
,
117 tree
, tree
, tree
, alloc_pool
);
120 /* Return the rtx-label that corresponds to a LABEL_DECL,
121 creating it if necessary. */
124 label_rtx (tree label
)
126 gcc_assert (TREE_CODE (label
) == LABEL_DECL
);
128 if (!DECL_RTL_SET_P (label
))
130 rtx r
= gen_label_rtx ();
131 SET_DECL_RTL (label
, r
);
132 if (FORCED_LABEL (label
) || DECL_NONLOCAL (label
))
133 LABEL_PRESERVE_P (r
) = 1;
136 return DECL_RTL (label
);
139 /* As above, but also put it on the forced-reference list of the
140 function that contains it. */
142 force_label_rtx (tree label
)
144 rtx ref
= label_rtx (label
);
145 tree function
= decl_function_context (label
);
147 gcc_assert (function
);
149 forced_labels
= gen_rtx_EXPR_LIST (VOIDmode
, ref
, forced_labels
);
153 /* Add an unconditional jump to LABEL as the next sequential instruction. */
156 emit_jump (rtx label
)
158 do_pending_stack_adjust ();
159 emit_jump_insn (gen_jump (label
));
163 /* Emit code to jump to the address
164 specified by the pointer expression EXP. */
167 expand_computed_goto (tree exp
)
169 rtx x
= expand_normal (exp
);
171 x
= convert_memory_address (Pmode
, x
);
173 do_pending_stack_adjust ();
174 emit_indirect_jump (x
);
177 /* Handle goto statements and the labels that they can go to. */
179 /* Specify the location in the RTL code of a label LABEL,
180 which is a LABEL_DECL tree node.
182 This is used for the kind of label that the user can jump to with a
183 goto statement, and for alternatives of a switch or case statement.
184 RTL labels generated for loops and conditionals don't go through here;
185 they are generated directly at the RTL level, by other functions below.
187 Note that this has nothing to do with defining label *names*.
188 Languages vary in how they do that and what that even means. */
191 expand_label (tree label
)
193 rtx label_r
= label_rtx (label
);
195 do_pending_stack_adjust ();
196 emit_label (label_r
);
197 if (DECL_NAME (label
))
198 LABEL_NAME (DECL_RTL (label
)) = IDENTIFIER_POINTER (DECL_NAME (label
));
200 if (DECL_NONLOCAL (label
))
202 expand_nl_goto_receiver ();
203 nonlocal_goto_handler_labels
204 = gen_rtx_EXPR_LIST (VOIDmode
, label_r
,
205 nonlocal_goto_handler_labels
);
208 if (FORCED_LABEL (label
))
209 forced_labels
= gen_rtx_EXPR_LIST (VOIDmode
, label_r
, forced_labels
);
211 if (DECL_NONLOCAL (label
) || FORCED_LABEL (label
))
212 maybe_set_first_label_num (label_r
);
215 /* Generate RTL code for a `goto' statement with target label LABEL.
216 LABEL should be a LABEL_DECL tree node that was or will later be
217 defined with `expand_label'. */
220 expand_goto (tree label
)
222 #ifdef ENABLE_CHECKING
223 /* Check for a nonlocal goto to a containing function. Should have
224 gotten translated to __builtin_nonlocal_goto. */
225 tree context
= decl_function_context (label
);
226 gcc_assert (!context
|| context
== current_function_decl
);
229 emit_jump (label_rtx (label
));
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int count = 0;

  for (; *s != '\0'; s++)
    if (*s == c)
      count++;

  return count;
}
242 /* Generate RTL for an asm statement (explicit assembler code).
243 STRING is a STRING_CST node containing the assembler code text,
244 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
245 insn is volatile; don't optimize it. */
248 expand_asm_loc (tree string
, int vol
, location_t locus
)
252 if (TREE_CODE (string
) == ADDR_EXPR
)
253 string
= TREE_OPERAND (string
, 0);
255 body
= gen_rtx_ASM_INPUT_loc (VOIDmode
,
256 ggc_strdup (TREE_STRING_POINTER (string
)),
259 MEM_VOLATILE_P (body
) = vol
;
264 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
265 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
266 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
267 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
268 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
269 constraint allows the use of a register operand. And, *IS_INOUT
270 will be true if the operand is read-write, i.e., if it is used as
271 an input as well as an output. If *CONSTRAINT_P is not in
272 canonical form, it will be made canonical. (Note that `+' will be
273 replaced with `=' as part of this process.)
275 Returns TRUE if all went well; FALSE if an error occurred. */
278 parse_output_constraint (const char **constraint_p
, int operand_num
,
279 int ninputs
, int noutputs
, bool *allows_mem
,
280 bool *allows_reg
, bool *is_inout
)
282 const char *constraint
= *constraint_p
;
285 /* Assume the constraint doesn't allow the use of either a register
290 /* Allow the `=' or `+' to not be at the beginning of the string,
291 since it wasn't explicitly documented that way, and there is a
292 large body of code that puts it last. Swap the character to
293 the front, so as not to uglify any place else. */
294 p
= strchr (constraint
, '=');
296 p
= strchr (constraint
, '+');
298 /* If the string doesn't contain an `=', issue an error
302 error ("output operand constraint lacks %<=%>");
306 /* If the constraint begins with `+', then the operand is both read
307 from and written to. */
308 *is_inout
= (*p
== '+');
310 /* Canonicalize the output constraint so that it begins with `='. */
311 if (p
!= constraint
|| *is_inout
)
314 size_t c_len
= strlen (constraint
);
317 warning (0, "output constraint %qc for operand %d "
318 "is not at the beginning",
321 /* Make a copy of the constraint. */
322 buf
= XALLOCAVEC (char, c_len
+ 1);
323 strcpy (buf
, constraint
);
324 /* Swap the first character and the `=' or `+'. */
325 buf
[p
- constraint
] = buf
[0];
326 /* Make sure the first character is an `='. (Until we do this,
327 it might be a `+'.) */
329 /* Replace the constraint with the canonicalized string. */
330 *constraint_p
= ggc_alloc_string (buf
, c_len
);
331 constraint
= *constraint_p
;
334 /* Loop through the constraint string. */
335 for (p
= constraint
+ 1; *p
; p
+= CONSTRAINT_LEN (*p
, p
))
340 error ("operand constraint contains incorrectly positioned "
345 if (operand_num
+ 1 == ninputs
+ noutputs
)
347 error ("%<%%%> constraint used with last operand");
352 case 'V': case TARGET_MEM_CONSTRAINT
: case 'o':
356 case '?': case '!': case '*': case '&': case '#':
357 case 'E': case 'F': case 'G': case 'H':
358 case 's': case 'i': case 'n':
359 case 'I': case 'J': case 'K': case 'L': case 'M':
360 case 'N': case 'O': case 'P': case ',':
363 case '0': case '1': case '2': case '3': case '4':
364 case '5': case '6': case '7': case '8': case '9':
366 error ("matching constraint not valid in output operand");
370 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
371 excepting those that expand_call created. So match memory
388 if (REG_CLASS_FROM_CONSTRAINT (*p
, p
) != NO_REGS
)
390 #ifdef EXTRA_CONSTRAINT_STR
391 else if (EXTRA_ADDRESS_CONSTRAINT (*p
, p
))
393 else if (EXTRA_MEMORY_CONSTRAINT (*p
, p
))
397 /* Otherwise we can't assume anything about the nature of
398 the constraint except that it isn't purely registers.
399 Treat it like "g" and hope for the best. */
410 /* Similar, but for input constraints. */
413 parse_input_constraint (const char **constraint_p
, int input_num
,
414 int ninputs
, int noutputs
, int ninout
,
415 const char * const * constraints
,
416 bool *allows_mem
, bool *allows_reg
)
418 const char *constraint
= *constraint_p
;
419 const char *orig_constraint
= constraint
;
420 size_t c_len
= strlen (constraint
);
422 bool saw_match
= false;
424 /* Assume the constraint doesn't allow the use of either
425 a register or memory. */
429 /* Make sure constraint has neither `=', `+', nor '&'. */
431 for (j
= 0; j
< c_len
; j
+= CONSTRAINT_LEN (constraint
[j
], constraint
+j
))
432 switch (constraint
[j
])
434 case '+': case '=': case '&':
435 if (constraint
== orig_constraint
)
437 error ("input operand constraint contains %qc", constraint
[j
]);
443 if (constraint
== orig_constraint
444 && input_num
+ 1 == ninputs
- ninout
)
446 error ("%<%%%> constraint used with last operand");
451 case 'V': case TARGET_MEM_CONSTRAINT
: case 'o':
456 case '?': case '!': case '*': case '#':
457 case 'E': case 'F': case 'G': case 'H':
458 case 's': case 'i': case 'n':
459 case 'I': case 'J': case 'K': case 'L': case 'M':
460 case 'N': case 'O': case 'P': case ',':
463 /* Whether or not a numeric constraint allows a register is
464 decided by the matching constraint, and so there is no need
465 to do anything special with them. We must handle them in
466 the default case, so that we don't unnecessarily force
467 operands to memory. */
468 case '0': case '1': case '2': case '3': case '4':
469 case '5': case '6': case '7': case '8': case '9':
476 match
= strtoul (constraint
+ j
, &end
, 10);
477 if (match
>= (unsigned long) noutputs
)
479 error ("matching constraint references invalid operand number");
483 /* Try and find the real constraint for this dup. Only do this
484 if the matching constraint is the only alternative. */
486 && (j
== 0 || (j
== 1 && constraint
[0] == '%')))
488 constraint
= constraints
[match
];
489 *constraint_p
= constraint
;
490 c_len
= strlen (constraint
);
492 /* ??? At the end of the loop, we will skip the first part of
493 the matched constraint. This assumes not only that the
494 other constraint is an output constraint, but also that
495 the '=' or '+' come first. */
499 j
= end
- constraint
;
500 /* Anticipate increment at end of loop. */
515 if (! ISALPHA (constraint
[j
]))
517 error ("invalid punctuation %qc in constraint", constraint
[j
]);
520 if (REG_CLASS_FROM_CONSTRAINT (constraint
[j
], constraint
+ j
)
523 #ifdef EXTRA_CONSTRAINT_STR
524 else if (EXTRA_ADDRESS_CONSTRAINT (constraint
[j
], constraint
+ j
))
526 else if (EXTRA_MEMORY_CONSTRAINT (constraint
[j
], constraint
+ j
))
530 /* Otherwise we can't assume anything about the nature of
531 the constraint except that it isn't purely registers.
532 Treat it like "g" and hope for the best. */
540 if (saw_match
&& !*allows_reg
)
541 warning (0, "matching constraint does not allow a register");
546 /* Return DECL iff there's an overlap between *REGS and DECL, where DECL
547 can be an asm-declared register. Called via walk_tree. */
550 decl_overlaps_hard_reg_set_p (tree
*declp
, int *walk_subtrees ATTRIBUTE_UNUSED
,
554 const HARD_REG_SET
*const regs
= (const HARD_REG_SET
*) data
;
556 if (TREE_CODE (decl
) == VAR_DECL
)
558 if (DECL_HARD_REGISTER (decl
)
559 && REG_P (DECL_RTL (decl
))
560 && REGNO (DECL_RTL (decl
)) < FIRST_PSEUDO_REGISTER
)
562 rtx reg
= DECL_RTL (decl
);
564 if (overlaps_hard_reg_set_p (*regs
, GET_MODE (reg
), REGNO (reg
)))
569 else if (TYPE_P (decl
) || TREE_CODE (decl
) == PARM_DECL
)
574 /* If there is an overlap between *REGS and DECL, return the first overlap
577 tree_overlaps_hard_reg_set (tree decl
, HARD_REG_SET
*regs
)
579 return walk_tree (&decl
, decl_overlaps_hard_reg_set_p
, regs
, NULL
);
582 /* Check for overlap between registers marked in CLOBBERED_REGS and
583 anything inappropriate in T. Emit error and return the register
584 variable definition for error, NULL_TREE for ok. */
587 tree_conflicts_with_clobbers_p (tree t
, HARD_REG_SET
*clobbered_regs
)
589 /* Conflicts between asm-declared register variables and the clobber
590 list are not allowed. */
591 tree overlap
= tree_overlaps_hard_reg_set (t
, clobbered_regs
);
595 error ("asm-specifier for variable %qE conflicts with asm clobber list",
596 DECL_NAME (overlap
));
598 /* Reset registerness to stop multiple errors emitted for a single
600 DECL_REGISTER (overlap
) = 0;
607 /* Generate RTL for an asm statement with arguments.
608 STRING is the instruction template.
609 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
610 Each output or input has an expression in the TREE_VALUE and
611 a tree list in TREE_PURPOSE which in turn contains a constraint
612 name in TREE_VALUE (or NULL_TREE) and a constraint string
614 CLOBBERS is a list of STRING_CST nodes each naming a hard register
615 that is clobbered by this insn.
617 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
618 Some elements of OUTPUTS may be replaced with trees representing temporary
619 values. The caller should copy those temporary values to the originally
622 VOL nonzero means the insn is volatile; don't optimize it. */
625 expand_asm_operands (tree string
, tree outputs
, tree inputs
,
626 tree clobbers
, tree labels
, int vol
, location_t locus
)
628 rtvec argvec
, constraintvec
, labelvec
;
630 int ninputs
= list_length (inputs
);
631 int noutputs
= list_length (outputs
);
632 int nlabels
= list_length (labels
);
635 HARD_REG_SET clobbered_regs
;
636 int clobber_conflict_found
= 0;
640 /* Vector of RTX's of evaluated output operands. */
641 rtx
*output_rtx
= XALLOCAVEC (rtx
, noutputs
);
642 int *inout_opnum
= XALLOCAVEC (int, noutputs
);
643 rtx
*real_output_rtx
= XALLOCAVEC (rtx
, noutputs
);
644 enum machine_mode
*inout_mode
= XALLOCAVEC (enum machine_mode
, noutputs
);
645 const char **constraints
= XALLOCAVEC (const char *, noutputs
+ ninputs
);
646 int old_generating_concat_p
= generating_concat_p
;
648 /* An ASM with no outputs needs to be treated as volatile, for now. */
652 if (! check_operand_nalternatives (outputs
, inputs
))
655 string
= resolve_asm_operand_names (string
, outputs
, inputs
, labels
);
657 /* Collect constraints. */
659 for (t
= outputs
; t
; t
= TREE_CHAIN (t
), i
++)
660 constraints
[i
] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
661 for (t
= inputs
; t
; t
= TREE_CHAIN (t
), i
++)
662 constraints
[i
] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
664 /* Sometimes we wish to automatically clobber registers across an asm.
665 Case in point is when the i386 backend moved from cc0 to a hard reg --
666 maintaining source-level compatibility means automatically clobbering
667 the flags register. */
668 clobbers
= targetm
.md_asm_clobbers (outputs
, inputs
, clobbers
);
670 /* Count the number of meaningful clobbered registers, ignoring what
671 we would ignore later. */
673 CLEAR_HARD_REG_SET (clobbered_regs
);
674 for (tail
= clobbers
; tail
; tail
= TREE_CHAIN (tail
))
679 if (TREE_VALUE (tail
) == error_mark_node
)
681 regname
= TREE_STRING_POINTER (TREE_VALUE (tail
));
683 i
= decode_reg_name_and_count (regname
, &nregs
);
687 error ("unknown register name %qs in %<asm%>", regname
);
689 /* Mark clobbered registers. */
694 for (reg
= i
; reg
< i
+ nregs
; reg
++)
698 /* Clobbering the PIC register is an error. */
699 if (reg
== (int) PIC_OFFSET_TABLE_REGNUM
)
701 error ("PIC register clobbered by %qs in %<asm%>", regname
);
705 SET_HARD_REG_BIT (clobbered_regs
, reg
);
710 /* First pass over inputs and outputs checks validity and sets
711 mark_addressable if needed. */
714 for (i
= 0, tail
= outputs
; tail
; tail
= TREE_CHAIN (tail
), i
++)
716 tree val
= TREE_VALUE (tail
);
717 tree type
= TREE_TYPE (val
);
718 const char *constraint
;
723 /* If there's an erroneous arg, emit no insn. */
724 if (type
== error_mark_node
)
727 /* Try to parse the output constraint. If that fails, there's
728 no point in going further. */
729 constraint
= constraints
[i
];
730 if (!parse_output_constraint (&constraint
, i
, ninputs
, noutputs
,
731 &allows_mem
, &allows_reg
, &is_inout
))
738 && REG_P (DECL_RTL (val
))
739 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
))))
740 mark_addressable (val
);
747 if (ninputs
+ noutputs
> MAX_RECOG_OPERANDS
)
749 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS
);
753 for (i
= 0, tail
= inputs
; tail
; i
++, tail
= TREE_CHAIN (tail
))
755 bool allows_reg
, allows_mem
;
756 const char *constraint
;
758 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
759 would get VOIDmode and that could cause a crash in reload. */
760 if (TREE_TYPE (TREE_VALUE (tail
)) == error_mark_node
)
763 constraint
= constraints
[i
+ noutputs
];
764 if (! parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, ninout
,
765 constraints
, &allows_mem
, &allows_reg
))
768 if (! allows_reg
&& allows_mem
)
769 mark_addressable (TREE_VALUE (tail
));
772 /* Second pass evaluates arguments. */
774 /* Make sure stack is consistent for asm goto. */
776 do_pending_stack_adjust ();
779 for (i
= 0, tail
= outputs
; tail
; tail
= TREE_CHAIN (tail
), i
++)
781 tree val
= TREE_VALUE (tail
);
782 tree type
= TREE_TYPE (val
);
789 ok
= parse_output_constraint (&constraints
[i
], i
, ninputs
,
790 noutputs
, &allows_mem
, &allows_reg
,
794 /* If an output operand is not a decl or indirect ref and our constraint
795 allows a register, make a temporary to act as an intermediate.
796 Make the asm insn write into that, then our caller will copy it to
797 the real output operand. Likewise for promoted variables. */
799 generating_concat_p
= 0;
801 real_output_rtx
[i
] = NULL_RTX
;
802 if ((TREE_CODE (val
) == INDIRECT_REF
805 && (allows_mem
|| REG_P (DECL_RTL (val
)))
806 && ! (REG_P (DECL_RTL (val
))
807 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
)))
811 op
= expand_expr (val
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
813 op
= validize_mem (op
);
815 if (! allows_reg
&& !MEM_P (op
))
816 error ("output number %d not directly addressable", i
);
817 if ((! allows_mem
&& MEM_P (op
))
818 || GET_CODE (op
) == CONCAT
)
820 real_output_rtx
[i
] = op
;
821 op
= gen_reg_rtx (GET_MODE (op
));
823 emit_move_insn (op
, real_output_rtx
[i
]);
828 op
= assign_temp (type
, 0, 1);
829 op
= validize_mem (op
);
830 if (!MEM_P (op
) && TREE_CODE (TREE_VALUE (tail
)) == SSA_NAME
)
831 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail
)), op
);
832 TREE_VALUE (tail
) = make_tree (type
, op
);
836 generating_concat_p
= old_generating_concat_p
;
840 inout_mode
[ninout
] = TYPE_MODE (type
);
841 inout_opnum
[ninout
++] = i
;
844 if (tree_conflicts_with_clobbers_p (val
, &clobbered_regs
))
845 clobber_conflict_found
= 1;
848 /* Make vectors for the expression-rtx, constraint strings,
849 and named operands. */
851 argvec
= rtvec_alloc (ninputs
);
852 constraintvec
= rtvec_alloc (ninputs
);
853 labelvec
= rtvec_alloc (nlabels
);
855 body
= gen_rtx_ASM_OPERANDS ((noutputs
== 0 ? VOIDmode
856 : GET_MODE (output_rtx
[0])),
857 ggc_strdup (TREE_STRING_POINTER (string
)),
858 empty_string
, 0, argvec
, constraintvec
,
861 MEM_VOLATILE_P (body
) = vol
;
863 /* Eval the inputs and put them into ARGVEC.
864 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
866 for (i
= 0, tail
= inputs
; tail
; tail
= TREE_CHAIN (tail
), ++i
)
868 bool allows_reg
, allows_mem
;
869 const char *constraint
;
874 constraint
= constraints
[i
+ noutputs
];
875 ok
= parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, ninout
,
876 constraints
, &allows_mem
, &allows_reg
);
879 generating_concat_p
= 0;
881 val
= TREE_VALUE (tail
);
882 type
= TREE_TYPE (val
);
883 /* EXPAND_INITIALIZER will not generate code for valid initializer
884 constants, but will still generate code for other types of operand.
885 This is the behavior we want for constant constraints. */
886 op
= expand_expr (val
, NULL_RTX
, VOIDmode
,
887 allows_reg
? EXPAND_NORMAL
888 : allows_mem
? EXPAND_MEMORY
889 : EXPAND_INITIALIZER
);
891 /* Never pass a CONCAT to an ASM. */
892 if (GET_CODE (op
) == CONCAT
)
893 op
= force_reg (GET_MODE (op
), op
);
895 op
= validize_mem (op
);
897 if (asm_operand_ok (op
, constraint
, NULL
) <= 0)
899 if (allows_reg
&& TYPE_MODE (type
) != BLKmode
)
900 op
= force_reg (TYPE_MODE (type
), op
);
901 else if (!allows_mem
)
902 warning (0, "asm operand %d probably doesn%'t match constraints",
906 /* We won't recognize either volatile memory or memory
907 with a queued address as available a memory_operand
908 at this point. Ignore it: clearly this *is* a memory. */
914 generating_concat_p
= old_generating_concat_p
;
915 ASM_OPERANDS_INPUT (body
, i
) = op
;
917 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body
, i
)
918 = gen_rtx_ASM_INPUT (TYPE_MODE (type
),
919 ggc_strdup (constraints
[i
+ noutputs
]));
921 if (tree_conflicts_with_clobbers_p (val
, &clobbered_regs
))
922 clobber_conflict_found
= 1;
925 /* Protect all the operands from the queue now that they have all been
928 generating_concat_p
= 0;
930 /* For in-out operands, copy output rtx to input rtx. */
931 for (i
= 0; i
< ninout
; i
++)
933 int j
= inout_opnum
[i
];
936 ASM_OPERANDS_INPUT (body
, ninputs
- ninout
+ i
)
939 sprintf (buffer
, "%d", j
);
940 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body
, ninputs
- ninout
+ i
)
941 = gen_rtx_ASM_INPUT (inout_mode
[i
], ggc_strdup (buffer
));
944 /* Copy labels to the vector. */
945 for (i
= 0, tail
= labels
; i
< nlabels
; ++i
, tail
= TREE_CHAIN (tail
))
946 ASM_OPERANDS_LABEL (body
, i
)
947 = gen_rtx_LABEL_REF (Pmode
, label_rtx (TREE_VALUE (tail
)));
949 generating_concat_p
= old_generating_concat_p
;
951 /* Now, for each output, construct an rtx
952 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
953 ARGVEC CONSTRAINTS OPNAMES))
954 If there is more than one, put them inside a PARALLEL. */
956 if (nlabels
> 0 && nclobbers
== 0)
958 gcc_assert (noutputs
== 0);
959 emit_jump_insn (body
);
961 else if (noutputs
== 0 && nclobbers
== 0)
963 /* No output operands: put in a raw ASM_OPERANDS rtx. */
966 else if (noutputs
== 1 && nclobbers
== 0)
968 ASM_OPERANDS_OUTPUT_CONSTRAINT (body
) = ggc_strdup (constraints
[0]);
969 emit_insn (gen_rtx_SET (VOIDmode
, output_rtx
[0], body
));
979 body
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (num
+ nclobbers
));
981 /* For each output operand, store a SET. */
982 for (i
= 0, tail
= outputs
; tail
; tail
= TREE_CHAIN (tail
), i
++)
985 = gen_rtx_SET (VOIDmode
,
988 (GET_MODE (output_rtx
[i
]),
989 ggc_strdup (TREE_STRING_POINTER (string
)),
990 ggc_strdup (constraints
[i
]),
991 i
, argvec
, constraintvec
, labelvec
, locus
));
993 MEM_VOLATILE_P (SET_SRC (XVECEXP (body
, 0, i
))) = vol
;
996 /* If there are no outputs (but there are some clobbers)
997 store the bare ASM_OPERANDS into the PARALLEL. */
1000 XVECEXP (body
, 0, i
++) = obody
;
1002 /* Store (clobber REG) for each clobbered register specified. */
1004 for (tail
= clobbers
; tail
; tail
= TREE_CHAIN (tail
))
1006 const char *regname
= TREE_STRING_POINTER (TREE_VALUE (tail
));
1008 int j
= decode_reg_name_and_count (regname
, &nregs
);
1013 if (j
== -3) /* `cc', which is not a register */
1016 if (j
== -4) /* `memory', don't cache memory across asm */
1018 XVECEXP (body
, 0, i
++)
1019 = gen_rtx_CLOBBER (VOIDmode
,
1022 gen_rtx_SCRATCH (VOIDmode
)));
1026 /* Ignore unknown register, error already signaled. */
1030 for (reg
= j
; reg
< j
+ nregs
; reg
++)
1032 /* Use QImode since that's guaranteed to clobber just
1034 clobbered_reg
= gen_rtx_REG (QImode
, reg
);
1036 /* Do sanity check for overlap between clobbers and
1037 respectively input and outputs that hasn't been
1038 handled. Such overlap should have been detected and
1040 if (!clobber_conflict_found
)
1044 /* We test the old body (obody) contents to avoid
1045 tripping over the under-construction body. */
1046 for (opno
= 0; opno
< noutputs
; opno
++)
1047 if (reg_overlap_mentioned_p (clobbered_reg
,
1050 ("asm clobber conflict with output operand");
1052 for (opno
= 0; opno
< ninputs
- ninout
; opno
++)
1053 if (reg_overlap_mentioned_p (clobbered_reg
,
1054 ASM_OPERANDS_INPUT (obody
,
1057 ("asm clobber conflict with input operand");
1060 XVECEXP (body
, 0, i
++)
1061 = gen_rtx_CLOBBER (VOIDmode
, clobbered_reg
);
1066 emit_jump_insn (body
);
1071 /* For any outputs that needed reloading into registers, spill them
1072 back to where they belong. */
1073 for (i
= 0; i
< noutputs
; ++i
)
1074 if (real_output_rtx
[i
])
1075 emit_move_insn (real_output_rtx
[i
], output_rtx
[i
]);
1077 crtl
->has_asm_statement
= 1;
1082 expand_asm_stmt (gimple stmt
)
1085 tree outputs
, tail
, t
;
1089 tree str
, out
, in
, cl
, labels
;
1090 location_t locus
= gimple_location (stmt
);
1092 /* Meh... convert the gimple asm operands into real tree lists.
1093 Eventually we should make all routines work on the vectors instead
1094 of relying on TREE_CHAIN. */
1096 n
= gimple_asm_noutputs (stmt
);
1099 t
= out
= gimple_asm_output_op (stmt
, 0);
1100 for (i
= 1; i
< n
; i
++)
1101 t
= TREE_CHAIN (t
) = gimple_asm_output_op (stmt
, i
);
1105 n
= gimple_asm_ninputs (stmt
);
1108 t
= in
= gimple_asm_input_op (stmt
, 0);
1109 for (i
= 1; i
< n
; i
++)
1110 t
= TREE_CHAIN (t
) = gimple_asm_input_op (stmt
, i
);
1114 n
= gimple_asm_nclobbers (stmt
);
1117 t
= cl
= gimple_asm_clobber_op (stmt
, 0);
1118 for (i
= 1; i
< n
; i
++)
1119 t
= TREE_CHAIN (t
) = gimple_asm_clobber_op (stmt
, i
);
1123 n
= gimple_asm_nlabels (stmt
);
1126 t
= labels
= gimple_asm_label_op (stmt
, 0);
1127 for (i
= 1; i
< n
; i
++)
1128 t
= TREE_CHAIN (t
) = gimple_asm_label_op (stmt
, i
);
1131 s
= gimple_asm_string (stmt
);
1132 str
= build_string (strlen (s
), s
);
1134 if (gimple_asm_input_p (stmt
))
1136 expand_asm_loc (str
, gimple_asm_volatile_p (stmt
), locus
);
1141 noutputs
= gimple_asm_noutputs (stmt
);
1142 /* o[I] is the place that output number I should be written. */
1143 o
= (tree
*) alloca (noutputs
* sizeof (tree
));
1145 /* Record the contents of OUTPUTS before it is modified. */
1146 for (i
= 0, tail
= outputs
; tail
; tail
= TREE_CHAIN (tail
), i
++)
1147 o
[i
] = TREE_VALUE (tail
);
1149 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1150 OUTPUTS some trees for where the values were actually stored. */
1151 expand_asm_operands (str
, outputs
, in
, cl
, labels
,
1152 gimple_asm_volatile_p (stmt
), locus
);
1154 /* Copy all the intermediate outputs into the specified outputs. */
1155 for (i
= 0, tail
= outputs
; tail
; tail
= TREE_CHAIN (tail
), i
++)
1157 if (o
[i
] != TREE_VALUE (tail
))
1159 expand_assignment (o
[i
], TREE_VALUE (tail
), false);
1162 /* Restore the original value so that it's correct the next
1163 time we expand this function. */
1164 TREE_VALUE (tail
) = o
[i
];
1169 /* A subroutine of expand_asm_operands. Check that all operands have
1170 the same number of alternatives. Return true if so. */
1173 check_operand_nalternatives (tree outputs
, tree inputs
)
1175 if (outputs
|| inputs
)
1177 tree tmp
= TREE_PURPOSE (outputs
? outputs
: inputs
);
1179 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp
)));
1182 if (nalternatives
+ 1 > MAX_RECOG_ALTERNATIVES
)
1184 error ("too many alternatives in %<asm%>");
1191 const char *constraint
1192 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp
)));
1194 if (n_occurrences (',', constraint
) != nalternatives
)
1196 error ("operand constraints for %<asm%> differ "
1197 "in number of alternatives");
1201 if (TREE_CHAIN (tmp
))
1202 tmp
= TREE_CHAIN (tmp
);
1204 tmp
= next
, next
= 0;
1211 /* A subroutine of expand_asm_operands. Check that all operand names
1212 are unique. Return true if so. We rely on the fact that these names
1213 are identifiers, and so have been canonicalized by get_identifier,
1214 so all we need are pointer comparisons. */
1217 check_unique_operand_names (tree outputs
, tree inputs
, tree labels
)
1219 tree i
, j
, i_name
= NULL_TREE
;
1221 for (i
= outputs
; i
; i
= TREE_CHAIN (i
))
1223 i_name
= TREE_PURPOSE (TREE_PURPOSE (i
));
1227 for (j
= TREE_CHAIN (i
); j
; j
= TREE_CHAIN (j
))
1228 if (simple_cst_equal (i_name
, TREE_PURPOSE (TREE_PURPOSE (j
))))
1232 for (i
= inputs
; i
; i
= TREE_CHAIN (i
))
1234 i_name
= TREE_PURPOSE (TREE_PURPOSE (i
));
1238 for (j
= TREE_CHAIN (i
); j
; j
= TREE_CHAIN (j
))
1239 if (simple_cst_equal (i_name
, TREE_PURPOSE (TREE_PURPOSE (j
))))
1241 for (j
= outputs
; j
; j
= TREE_CHAIN (j
))
1242 if (simple_cst_equal (i_name
, TREE_PURPOSE (TREE_PURPOSE (j
))))
1246 for (i
= labels
; i
; i
= TREE_CHAIN (i
))
1248 i_name
= TREE_PURPOSE (i
);
1252 for (j
= TREE_CHAIN (i
); j
; j
= TREE_CHAIN (j
))
1253 if (simple_cst_equal (i_name
, TREE_PURPOSE (j
)))
1255 for (j
= inputs
; j
; j
= TREE_CHAIN (j
))
1256 if (simple_cst_equal (i_name
, TREE_PURPOSE (TREE_PURPOSE (j
))))
1263 error ("duplicate asm operand name %qs", TREE_STRING_POINTER (i_name
));
1267 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1268 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1269 STRING and in the constraints to those numbers. */
1272 resolve_asm_operand_names (tree string
, tree outputs
, tree inputs
, tree labels
)
1279 check_unique_operand_names (outputs
, inputs
, labels
);
1281 /* Substitute [<name>] in input constraint strings. There should be no
1282 named operands in output constraints. */
1283 for (t
= inputs
; t
; t
= TREE_CHAIN (t
))
1285 c
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
1286 if (strchr (c
, '[') != NULL
)
1288 p
= buffer
= xstrdup (c
);
1289 while ((p
= strchr (p
, '[')) != NULL
)
1290 p
= resolve_operand_name_1 (p
, outputs
, inputs
, NULL
);
1291 TREE_VALUE (TREE_PURPOSE (t
))
1292 = build_string (strlen (buffer
), buffer
);
1297 /* Now check for any needed substitutions in the template. */
1298 c
= TREE_STRING_POINTER (string
);
1299 while ((c
= strchr (c
, '%')) != NULL
)
1303 else if (ISALPHA (c
[1]) && c
[2] == '[')
1307 c
+= 1 + (c
[1] == '%');
1314 /* OK, we need to make a copy so we can perform the substitutions.
1315 Assume that we will not need extra space--we get to remove '['
1316 and ']', which means we cannot have a problem until we have more
1317 than 999 operands. */
1318 buffer
= xstrdup (TREE_STRING_POINTER (string
));
1319 p
= buffer
+ (c
- TREE_STRING_POINTER (string
));
1321 while ((p
= strchr (p
, '%')) != NULL
)
1325 else if (ISALPHA (p
[1]) && p
[2] == '[')
1329 p
+= 1 + (p
[1] == '%');
1333 p
= resolve_operand_name_1 (p
, outputs
, inputs
, labels
);
1336 string
= build_string (strlen (buffer
), buffer
);
1343 /* A subroutine of resolve_operand_names. P points to the '[' for a
1344 potential named operand of the form [<name>]. In place, replace
1345 the name and brackets with a number. Return a pointer to the
1346 balance of the string after substitution. */
1349 resolve_operand_name_1 (char *p
, tree outputs
, tree inputs
, tree labels
)
1355 /* Collect the operand name. */
1356 q
= strchr (++p
, ']');
1359 error ("missing close brace for named operand");
1360 return strchr (p
, '\0');
1364 /* Resolve the name to a number. */
1365 for (op
= 0, t
= outputs
; t
; t
= TREE_CHAIN (t
), op
++)
1367 tree name
= TREE_PURPOSE (TREE_PURPOSE (t
));
1368 if (name
&& strcmp (TREE_STRING_POINTER (name
), p
) == 0)
1371 for (t
= inputs
; t
; t
= TREE_CHAIN (t
), op
++)
1373 tree name
= TREE_PURPOSE (TREE_PURPOSE (t
));
1374 if (name
&& strcmp (TREE_STRING_POINTER (name
), p
) == 0)
1377 for (t
= labels
; t
; t
= TREE_CHAIN (t
), op
++)
1379 tree name
= TREE_PURPOSE (t
);
1380 if (name
&& strcmp (TREE_STRING_POINTER (name
), p
) == 0)
1384 error ("undefined named operand %qs", identifier_to_locale (p
));
1388 /* Replace the name with the number. Unfortunately, not all libraries
1389 get the return value of sprintf correct, so search for the end of the
1390 generated string by hand. */
1391 sprintf (--p
, "%d", op
);
1392 p
= strchr (p
, '\0');
1394 /* Verify the no extra buffer space assumption. */
1395 gcc_assert (p
<= q
);
1397 /* Shift the rest of the buffer down to fill the gap. */
1398 memmove (p
, q
+ 1, strlen (q
+ 1) + 1);
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}
1417 /* Generate RTL to return directly from the current function.
1418 (That is, we bypass any return value.) */
1421 expand_naked_return (void)
1425 clear_pending_stack_adjust ();
1426 do_pending_stack_adjust ();
1428 end_label
= naked_return_label
;
1430 end_label
= naked_return_label
= gen_label_rtx ();
1432 emit_jump (end_label
);
1435 /* Generate RTL to return from the current function, with value VAL. */
1438 expand_value_return (rtx val
)
1440 /* Copy the value to the return location unless it's already there. */
1442 tree decl
= DECL_RESULT (current_function_decl
);
1443 rtx return_reg
= DECL_RTL (decl
);
1444 if (return_reg
!= val
)
1446 tree funtype
= TREE_TYPE (current_function_decl
);
1447 tree type
= TREE_TYPE (decl
);
1448 int unsignedp
= TYPE_UNSIGNED (type
);
1449 enum machine_mode old_mode
= DECL_MODE (decl
);
1450 enum machine_mode mode
;
1451 if (DECL_BY_REFERENCE (decl
))
1452 mode
= promote_function_mode (type
, old_mode
, &unsignedp
, funtype
, 2);
1454 mode
= promote_function_mode (type
, old_mode
, &unsignedp
, funtype
, 1);
1456 if (mode
!= old_mode
)
1457 val
= convert_modes (mode
, old_mode
, val
, unsignedp
);
1459 if (GET_CODE (return_reg
) == PARALLEL
)
1460 emit_group_load (return_reg
, val
, type
, int_size_in_bytes (type
));
1462 emit_move_insn (return_reg
, val
);
1465 expand_null_return_1 ();
1468 /* Output a return with no value. */
1471 expand_null_return_1 (void)
1473 clear_pending_stack_adjust ();
1474 do_pending_stack_adjust ();
1475 emit_jump (return_label
);
1478 /* Generate RTL to evaluate the expression RETVAL and return it
1479 from the current function. */
1482 expand_return (tree retval
)
1488 /* If function wants no value, give it none. */
1489 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl
))) == VOID_TYPE
)
1491 expand_normal (retval
);
1492 expand_null_return ();
1496 if (retval
== error_mark_node
)
1498 /* Treat this like a return of no value from a function that
1500 expand_null_return ();
1503 else if ((TREE_CODE (retval
) == MODIFY_EXPR
1504 || TREE_CODE (retval
) == INIT_EXPR
)
1505 && TREE_CODE (TREE_OPERAND (retval
, 0)) == RESULT_DECL
)
1506 retval_rhs
= TREE_OPERAND (retval
, 1);
1508 retval_rhs
= retval
;
1510 result_rtl
= DECL_RTL (DECL_RESULT (current_function_decl
));
1512 /* If we are returning the RESULT_DECL, then the value has already
1513 been stored into it, so we don't have to do anything special. */
1514 if (TREE_CODE (retval_rhs
) == RESULT_DECL
)
1515 expand_value_return (result_rtl
);
1517 /* If the result is an aggregate that is being returned in one (or more)
1518 registers, load the registers here. */
1520 else if (retval_rhs
!= 0
1521 && TYPE_MODE (TREE_TYPE (retval_rhs
)) == BLKmode
1522 && REG_P (result_rtl
))
1524 val
= copy_blkmode_to_reg (GET_MODE (result_rtl
), retval_rhs
);
1527 /* Use the mode of the result value on the return register. */
1528 PUT_MODE (result_rtl
, GET_MODE (val
));
1529 expand_value_return (val
);
1532 expand_null_return ();
1534 else if (retval_rhs
!= 0
1535 && !VOID_TYPE_P (TREE_TYPE (retval_rhs
))
1536 && (REG_P (result_rtl
)
1537 || (GET_CODE (result_rtl
) == PARALLEL
)))
1539 /* Calculate the return value into a temporary (usually a pseudo
1541 tree ot
= TREE_TYPE (DECL_RESULT (current_function_decl
));
1542 tree nt
= build_qualified_type (ot
, TYPE_QUALS (ot
) | TYPE_QUAL_CONST
);
1544 val
= assign_temp (nt
, 0, 1);
1545 val
= expand_expr (retval_rhs
, val
, GET_MODE (val
), EXPAND_NORMAL
);
1546 val
= force_not_mem (val
);
1547 /* Return the calculated value. */
1548 expand_value_return (val
);
1552 /* No hard reg used; calculate value into hard return reg. */
1553 expand_expr (retval
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
1554 expand_value_return (result_rtl
);
1558 /* Emit code to restore vital registers at the beginning of a nonlocal goto
1561 expand_nl_goto_receiver (void)
1565 /* Clobber the FP when we get here, so we have to make sure it's
1566 marked as used by this function. */
1567 emit_use (hard_frame_pointer_rtx
);
1569 /* Mark the static chain as clobbered here so life information
1570 doesn't get messed up for it. */
1571 chain
= targetm
.calls
.static_chain (current_function_decl
, true);
1572 if (chain
&& REG_P (chain
))
1573 emit_clobber (chain
);
1575 #ifdef HAVE_nonlocal_goto
1576 if (! HAVE_nonlocal_goto
)
1578 /* First adjust our frame pointer to its actual value. It was
1579 previously set to the start of the virtual area corresponding to
1580 the stacked variables when we branched here and now needs to be
1581 adjusted to the actual hardware fp value.
1583 Assignments are to virtual registers are converted by
1584 instantiate_virtual_regs into the corresponding assignment
1585 to the underlying register (fp in this case) that makes
1586 the original assignment true.
1587 So the following insn will actually be
1588 decrementing fp by STARTING_FRAME_OFFSET. */
1589 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
1591 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
1592 if (fixed_regs
[ARG_POINTER_REGNUM
])
1594 #ifdef ELIMINABLE_REGS
1595 /* If the argument pointer can be eliminated in favor of the
1596 frame pointer, we don't need to restore it. We assume here
1597 that if such an elimination is present, it can always be used.
1598 This is the case on all known machines; if we don't make this
1599 assumption, we do unnecessary saving on many machines. */
1600 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
1603 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
1604 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
1605 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
1608 if (i
== ARRAY_SIZE (elim_regs
))
1611 /* Now restore our arg pointer from the address at which it
1612 was saved in our stack frame. */
1613 emit_move_insn (crtl
->args
.internal_arg_pointer
,
1614 copy_to_reg (get_arg_pointer_save_area ()));
1619 #ifdef HAVE_nonlocal_goto_receiver
1620 if (HAVE_nonlocal_goto_receiver
)
1621 emit_insn (gen_nonlocal_goto_receiver ());
1624 /* We must not allow the code we just generated to be reordered by
1625 scheduling. Specifically, the update of the frame pointer must
1626 happen immediately, not later. */
1627 emit_insn (gen_blockage ());
1630 /* Emit code to save the current value of stack. */
1632 expand_stack_save (void)
1636 do_pending_stack_adjust ();
1637 emit_stack_save (SAVE_BLOCK
, &ret
);
1641 /* Emit code to restore the current value of stack. */
1643 expand_stack_restore (tree var
)
1645 rtx prev
, sa
= expand_normal (var
);
1647 sa
= convert_memory_address (Pmode
, sa
);
1649 prev
= get_last_insn ();
1650 emit_stack_restore (SAVE_BLOCK
, sa
);
1651 fixup_args_size_notes (prev
, get_last_insn (), 0);
1654 /* Do the insertion of a case label into case_list. The labels are
1655 fed to us in descending order from the sorted vector of case labels used
1656 in the tree part of the middle end. So the list we construct is
1657 sorted in ascending order. The bounds on the case range, LOW and HIGH,
1658 are converted to case's index type TYPE. Note that the original type
1659 of the case index in the source code is usually "lost" during
1660 gimplification due to type promotion, but the case labels retain the
1663 static struct case_node
*
1664 add_case_node (struct case_node
*head
, tree type
, tree low
, tree high
,
1665 tree label
, alloc_pool case_node_pool
)
1667 struct case_node
*r
;
1669 gcc_checking_assert (low
);
1670 gcc_checking_assert (! high
|| (TREE_TYPE (low
) == TREE_TYPE (high
)));
1672 /* Add this label to the chain. Make sure to drop overflow flags. */
1673 r
= (struct case_node
*) pool_alloc (case_node_pool
);
1674 r
->low
= build_int_cst_wide (type
, TREE_INT_CST_LOW (low
),
1675 TREE_INT_CST_HIGH (low
));
1676 r
->high
= build_int_cst_wide (type
, TREE_INT_CST_LOW (high
),
1677 TREE_INT_CST_HIGH (high
));
1678 r
->code_label
= label
;
1679 r
->parent
= r
->left
= NULL
;
1684 /* Dump ROOT, a list or tree of case nodes, to file. */
1687 dump_case_nodes (FILE *f
, struct case_node
*root
,
1688 int indent_step
, int indent_level
)
1690 HOST_WIDE_INT low
, high
;
1696 dump_case_nodes (f
, root
->left
, indent_step
, indent_level
);
1698 low
= tree_low_cst (root
->low
, 0);
1699 high
= tree_low_cst (root
->high
, 0);
1703 fprintf(f
, "%*s" HOST_WIDE_INT_PRINT_DEC
,
1704 indent_step
* indent_level
, "", low
);
1706 fprintf(f
, "%*s" HOST_WIDE_INT_PRINT_DEC
" ... " HOST_WIDE_INT_PRINT_DEC
,
1707 indent_step
* indent_level
, "", low
, high
);
1710 dump_case_nodes (f
, root
->right
, indent_step
, indent_level
);
1714 #define HAVE_casesi 0
1717 #ifndef HAVE_tablejump
1718 #define HAVE_tablejump 0
1721 /* Return the smallest number of different values for which it is best to use a
1722 jump-table instead of a tree of conditional branches. */
1725 case_values_threshold (void)
1727 unsigned int threshold
= PARAM_VALUE (PARAM_CASE_VALUES_THRESHOLD
);
1730 threshold
= targetm
.case_values_threshold ();
1735 /* Return true if a switch should be expanded as a decision tree.
1736 RANGE is the difference between highest and lowest case.
1737 UNIQ is number of unique case node targets, not counting the default case.
1738 COUNT is the number of comparisons needed, not counting the default case. */
1741 expand_switch_as_decision_tree_p (tree range
,
1742 unsigned int uniq ATTRIBUTE_UNUSED
,
1747 /* If neither casesi or tablejump is available, or flag_jump_tables
1748 over-ruled us, we really have no choice. */
1749 if (!HAVE_casesi
&& !HAVE_tablejump
)
1751 if (!flag_jump_tables
)
1754 /* If the switch is relatively small such that the cost of one
1755 indirect jump on the target are higher than the cost of a
1756 decision tree, go with the decision tree.
1758 If range of values is much bigger than number of values,
1759 or if it is too large to represent in a HOST_WIDE_INT,
1760 make a sequence of conditional branches instead of a dispatch.
1762 The definition of "much bigger" depends on whether we are
1763 optimizing for size or for speed. If the former, the maximum
1764 ratio range/count = 3, because this was found to be the optimal
1765 ratio for size on i686-pc-linux-gnu, see PR11823. The ratio
1766 10 is much older, and was probably selected after an extensive
1767 benchmarking investigation on numerous platforms. Or maybe it
1768 just made sense to someone at some point in the history of GCC,
1770 max_ratio
= optimize_insn_for_size_p () ? 3 : 10;
1771 if (count
< case_values_threshold ()
1772 || ! host_integerp (range
, /*pos=*/1)
1773 || compare_tree_int (range
, max_ratio
* count
) > 0)
1779 /* Generate a decision tree, switching on INDEX_EXPR and jumping to
1780 one of the labels in CASE_LIST or to the DEFAULT_LABEL.
1782 We generate a binary decision tree to select the appropriate target
1783 code. This is done as follows:
1785 If the index is a short or char that we do not have
1786 an insn to handle comparisons directly, convert it to
1787 a full integer now, rather than letting each comparison
1788 generate the conversion.
1790 Load the index into a register.
1792 The list of cases is rearranged into a binary tree,
1793 nearly optimal assuming equal probability for each case.
1795 The tree is transformed into RTL, eliminating redundant
1796 test conditions at the same time.
1798 If program flow could reach the end of the decision tree
1799 an unconditional jump to the default code is emitted.
1801 The above process is unaware of the CFG. The caller has to fix up
1802 the CFG itself. This is done in cfgexpand.c. */
1805 emit_case_decision_tree (tree index_expr
, tree index_type
,
1806 struct case_node
*case_list
, rtx default_label
)
1808 rtx index
= expand_normal (index_expr
);
1810 if (GET_MODE_CLASS (GET_MODE (index
)) == MODE_INT
1811 && ! have_insn_for (COMPARE
, GET_MODE (index
)))
1813 int unsignedp
= TYPE_UNSIGNED (index_type
);
1814 enum machine_mode wider_mode
;
1815 for (wider_mode
= GET_MODE (index
); wider_mode
!= VOIDmode
;
1816 wider_mode
= GET_MODE_WIDER_MODE (wider_mode
))
1817 if (have_insn_for (COMPARE
, wider_mode
))
1819 index
= convert_to_mode (wider_mode
, index
, unsignedp
);
1824 do_pending_stack_adjust ();
1828 index
= copy_to_reg (index
);
1829 if (TREE_CODE (index_expr
) == SSA_NAME
)
1830 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (index_expr
), index
);
1833 balance_case_nodes (&case_list
, NULL
);
1835 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1837 int indent_step
= ceil_log2 (TYPE_PRECISION (index_type
)) + 2;
1838 fprintf (dump_file
, ";; Expanding GIMPLE switch as decision tree:\n");
1839 dump_case_nodes (dump_file
, case_list
, indent_step
, 0);
1842 emit_case_nodes (index
, case_list
, default_label
, index_type
);
1844 emit_jump (default_label
);
1847 /* Generate a dispatch tabler, switching on INDEX_EXPR and jumping to
1848 one of the labels in CASE_LIST or to the DEFAULT_LABEL.
1849 MINVAL, MAXVAL, and RANGE are the extrema and range of the case
1850 labels in CASE_LIST.
1852 First, a jump insn is emitted. First we try "casesi". If that
1853 fails, try "tablejump". A target *must* have one of them (or both).
1855 Then, a table with the target labels is emitted.
1857 The process is unaware of the CFG. The caller has to fix up
1858 the CFG itself. This is done in cfgexpand.c. */
1861 emit_case_dispatch_table (tree index_expr
, tree index_type
,
1862 struct case_node
*case_list
, rtx default_label
,
1863 tree minval
, tree maxval
, tree range
)
1866 struct case_node
*n
;
1868 rtx fallback_label
= label_rtx (case_list
->code_label
);
1869 rtx table_label
= gen_label_rtx ();
1871 if (! try_casesi (index_type
, index_expr
, minval
, range
,
1872 table_label
, default_label
, fallback_label
))
1876 /* Index jumptables from zero for suitable values of minval to avoid
1877 a subtraction. For the rationale see:
1878 "http://gcc.gnu.org/ml/gcc-patches/2001-10/msg01234.html". */
1879 if (optimize_insn_for_speed_p ()
1880 && compare_tree_int (minval
, 0) > 0
1881 && compare_tree_int (minval
, 3) < 0)
1883 minval
= build_int_cst (index_type
, 0);
1887 ok
= try_tablejump (index_type
, index_expr
, minval
, range
,
1888 table_label
, default_label
);
1892 /* Get table of labels to jump to, in order of case index. */
1894 ncases
= tree_low_cst (range
, 0) + 1;
1895 labelvec
= XALLOCAVEC (rtx
, ncases
);
1896 memset (labelvec
, 0, ncases
* sizeof (rtx
));
1898 for (n
= case_list
; n
; n
= n
->right
)
1900 /* Compute the low and high bounds relative to the minimum
1901 value since that should fit in a HOST_WIDE_INT while the
1902 actual values may not. */
1904 = tree_low_cst (fold_build2 (MINUS_EXPR
, index_type
,
1905 n
->low
, minval
), 1);
1906 HOST_WIDE_INT i_high
1907 = tree_low_cst (fold_build2 (MINUS_EXPR
, index_type
,
1908 n
->high
, minval
), 1);
1911 for (i
= i_low
; i
<= i_high
; i
++)
1913 = gen_rtx_LABEL_REF (Pmode
, label_rtx (n
->code_label
));
1916 /* Fill in the gaps with the default. We may have gaps at
1917 the beginning if we tried to avoid the minval subtraction,
1918 so substitute some label even if the default label was
1919 deemed unreachable. */
1921 default_label
= fallback_label
;
1922 for (i
= 0; i
< ncases
; i
++)
1923 if (labelvec
[i
] == 0)
1924 labelvec
[i
] = gen_rtx_LABEL_REF (Pmode
, default_label
);
1926 /* Output the table. */
1927 emit_label (table_label
);
1929 if (CASE_VECTOR_PC_RELATIVE
|| flag_pic
)
1930 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE
,
1931 gen_rtx_LABEL_REF (Pmode
, table_label
),
1932 gen_rtvec_v (ncases
, labelvec
),
1933 const0_rtx
, const0_rtx
));
1935 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE
,
1936 gen_rtvec_v (ncases
, labelvec
)));
1938 /* Record no drop-through after the table. */
1942 /* Terminate a case (Pascal/Ada) or switch (C) statement
1943 in which ORIG_INDEX is the expression to be tested.
1944 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
1945 type as given in the source before any compiler conversions.
1946 Generate the code to test it and jump to the right place. */
1949 expand_case (gimple stmt
)
1951 tree minval
= NULL_TREE
, maxval
= NULL_TREE
, range
= NULL_TREE
;
1952 rtx default_label
= NULL_RTX
;
1953 unsigned int count
, uniq
;
1955 rtx before_case
, end
;
1956 int ncases
= gimple_switch_num_labels (stmt
);
1957 tree index_expr
= gimple_switch_index (stmt
);
1958 tree index_type
= TREE_TYPE (index_expr
);
1961 bitmap label_bitmap
;
1963 /* The insn after which the case dispatch should finally
1964 be emitted. Zero for a dummy. */
1967 /* A list of case labels; it is first built as a list and it may then
1968 be rearranged into a nearly balanced binary tree. */
1969 struct case_node
*case_list
= 0;
1971 /* A pool for case nodes. */
1972 alloc_pool case_node_pool
;
1974 /* An ERROR_MARK occurs for various reasons including invalid data type.
1975 ??? Can this still happen, with GIMPLE and all? */
1976 if (index_type
== error_mark_node
)
1979 /* cleanup_tree_cfg removes all SWITCH_EXPR with their index
1980 expressions being INTEGER_CST. */
1981 gcc_assert (TREE_CODE (index_expr
) != INTEGER_CST
);
1983 case_node_pool
= create_alloc_pool ("struct case_node pool",
1984 sizeof (struct case_node
),
1987 do_pending_stack_adjust ();
1989 /* The default case, if ever taken, is the first element. */
1990 elt
= gimple_switch_label (stmt
, 0);
1991 if (!CASE_LOW (elt
) && !CASE_HIGH (elt
))
1993 default_label
= label_rtx (CASE_LABEL (elt
));
1997 /* Get upper and lower bounds of case values. */
1998 elt
= gimple_switch_label (stmt
, stopi
);
1999 minval
= fold_convert (index_type
, CASE_LOW (elt
));
2000 elt
= gimple_switch_label (stmt
, ncases
- 1);
2001 if (CASE_HIGH (elt
))
2002 maxval
= fold_convert (index_type
, CASE_HIGH (elt
));
2004 maxval
= fold_convert (index_type
, CASE_LOW (elt
));
2006 /* Compute span of values. */
2007 range
= fold_build2 (MINUS_EXPR
, index_type
, maxval
, minval
);
2009 /* Listify the labels queue and gather some numbers to decide
2010 how to expand this switch(). */
2013 label_bitmap
= BITMAP_ALLOC (NULL
);
2014 for (i
= gimple_switch_num_labels (stmt
) - 1; i
>= stopi
; --i
)
2019 elt
= gimple_switch_label (stmt
, i
);
2020 low
= CASE_LOW (elt
);
2022 high
= CASE_HIGH (elt
);
2023 gcc_assert (! high
|| tree_int_cst_lt (low
, high
));
2025 /* Count the elements.
2026 A range counts double, since it requires two compares. */
2031 /* If we have not seen this label yet, then increase the
2032 number of unique case node targets seen. */
2033 lab
= label_rtx (CASE_LABEL (elt
));
2034 if (bitmap_set_bit (label_bitmap
, CODE_LABEL_NUMBER (lab
)))
2037 /* The canonical from of a case label in GIMPLE is that a simple case
2038 has an empty CASE_HIGH. For the casesi and tablejump expanders,
2039 the back ends want simple cases to have high == low. */
2043 case_list
= add_case_node (case_list
, index_type
, low
, high
,
2044 CASE_LABEL (elt
), case_node_pool
);
2046 BITMAP_FREE (label_bitmap
);
2048 /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
2049 destination, such as one with a default case only.
2050 It also removes cases that are out of range for the switch
2051 type, so we should never get a zero here. */
2052 gcc_assert (count
> 0);
2054 before_case
= start
= get_last_insn ();
2056 /* Decide how to expand this switch.
2057 The two options at this point are a dispatch table (casesi or
2058 tablejump) or a decision tree. */
2060 if (expand_switch_as_decision_tree_p (range
, uniq
, count
))
2061 emit_case_decision_tree (index_expr
, index_type
,
2062 case_list
, default_label
);
2064 emit_case_dispatch_table (index_expr
, index_type
,
2065 case_list
, default_label
,
2066 minval
, maxval
, range
);
2068 before_case
= NEXT_INSN (before_case
);
2069 end
= get_last_insn ();
2070 reorder_insns (before_case
, end
, start
);
2073 free_alloc_pool (case_node_pool
);
2076 /* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE. */
2079 do_jump_if_equal (enum machine_mode mode
, rtx op0
, rtx op1
, rtx label
,
2082 do_compare_rtx_and_jump (op0
, op1
, EQ
, unsignedp
, mode
,
2083 NULL_RTX
, NULL_RTX
, label
, -1);
2086 /* Take an ordered list of case nodes
2087 and transform them into a near optimal binary tree,
2088 on the assumption that any target code selection value is as
2089 likely as any other.
2091 The transformation is performed by splitting the ordered
2092 list into two equal sections plus a pivot. The parts are
2093 then attached to the pivot as left and right branches. Each
2094 branch is then transformed recursively. */
2097 balance_case_nodes (case_node_ptr
*head
, case_node_ptr parent
)
2109 /* Count the number of entries on branch. Also count the ranges. */
2113 if (!tree_int_cst_equal (np
->low
, np
->high
))
2122 /* Split this list if it is long enough for that to help. */
2126 /* If there are just three nodes, split at the middle one. */
2128 npp
= &(*npp
)->right
;
2131 /* Find the place in the list that bisects the list's total cost,
2132 where ranges count as 2.
2133 Here I gets half the total cost. */
2134 i
= (i
+ ranges
+ 1) / 2;
2137 /* Skip nodes while their cost does not reach that amount. */
2138 if (!tree_int_cst_equal ((*npp
)->low
, (*npp
)->high
))
2143 npp
= &(*npp
)->right
;
2148 np
->parent
= parent
;
2151 /* Optimize each of the two split parts. */
2152 balance_case_nodes (&np
->left
, np
);
2153 balance_case_nodes (&np
->right
, np
);
2157 /* Else leave this branch as one level,
2158 but fill in `parent' fields. */
2160 np
->parent
= parent
;
2161 for (; np
->right
; np
= np
->right
)
2162 np
->right
->parent
= np
;
2167 /* Search the parent sections of the case node tree
2168 to see if a test for the lower bound of NODE would be redundant.
2169 INDEX_TYPE is the type of the index expression.
2171 The instructions to generate the case decision tree are
2172 output in the same order as nodes are processed so it is
2173 known that if a parent node checks the range of the current
2174 node minus one that the current node is bounded at its lower
2175 span. Thus the test would be redundant. */
2178 node_has_low_bound (case_node_ptr node
, tree index_type
)
2181 case_node_ptr pnode
;
2183 /* If the lower bound of this node is the lowest value in the index type,
2184 we need not test it. */
2186 if (tree_int_cst_equal (node
->low
, TYPE_MIN_VALUE (index_type
)))
2189 /* If this node has a left branch, the value at the left must be less
2190 than that at this node, so it cannot be bounded at the bottom and
2191 we need not bother testing any further. */
2196 low_minus_one
= fold_build2 (MINUS_EXPR
, TREE_TYPE (node
->low
),
2198 build_int_cst (TREE_TYPE (node
->low
), 1));
2200 /* If the subtraction above overflowed, we can't verify anything.
2201 Otherwise, look for a parent that tests our value - 1. */
2203 if (! tree_int_cst_lt (low_minus_one
, node
->low
))
2206 for (pnode
= node
->parent
; pnode
; pnode
= pnode
->parent
)
2207 if (tree_int_cst_equal (low_minus_one
, pnode
->high
))
2213 /* Search the parent sections of the case node tree
2214 to see if a test for the upper bound of NODE would be redundant.
2215 INDEX_TYPE is the type of the index expression.
2217 The instructions to generate the case decision tree are
2218 output in the same order as nodes are processed so it is
2219 known that if a parent node checks the range of the current
2220 node plus one that the current node is bounded at its upper
2221 span. Thus the test would be redundant. */
2224 node_has_high_bound (case_node_ptr node
, tree index_type
)
2227 case_node_ptr pnode
;
2229 /* If there is no upper bound, obviously no test is needed. */
2231 if (TYPE_MAX_VALUE (index_type
) == NULL
)
2234 /* If the upper bound of this node is the highest value in the type
2235 of the index expression, we need not test against it. */
2237 if (tree_int_cst_equal (node
->high
, TYPE_MAX_VALUE (index_type
)))
2240 /* If this node has a right branch, the value at the right must be greater
2241 than that at this node, so it cannot be bounded at the top and
2242 we need not bother testing any further. */
2247 high_plus_one
= fold_build2 (PLUS_EXPR
, TREE_TYPE (node
->high
),
2249 build_int_cst (TREE_TYPE (node
->high
), 1));
2251 /* If the addition above overflowed, we can't verify anything.
2252 Otherwise, look for a parent that tests our value + 1. */
2254 if (! tree_int_cst_lt (node
->high
, high_plus_one
))
2257 for (pnode
= node
->parent
; pnode
; pnode
= pnode
->parent
)
2258 if (tree_int_cst_equal (high_plus_one
, pnode
->low
))
2264 /* Search the parent sections of the
2265 case node tree to see if both tests for the upper and lower
2266 bounds of NODE would be redundant. */
2269 node_is_bounded (case_node_ptr node
, tree index_type
)
2271 return (node_has_low_bound (node
, index_type
)
2272 && node_has_high_bound (node
, index_type
));
2275 /* Emit step-by-step code to select a case for the value of INDEX.
2276 The thus generated decision tree follows the form of the
2277 case-node binary tree NODE, whose nodes represent test conditions.
2278 INDEX_TYPE is the type of the index of the switch.
2280 Care is taken to prune redundant tests from the decision tree
2281 by detecting any boundary conditions already checked by
2282 emitted rtx. (See node_has_high_bound, node_has_low_bound
2283 and node_is_bounded, above.)
2285 Where the test conditions can be shown to be redundant we emit
2286 an unconditional jump to the target code. As a further
2287 optimization, the subordinates of a tree node are examined to
2288 check for bounded nodes. In this case conditional and/or
2289 unconditional jumps as a result of the boundary check for the
2290 current node are arranged to target the subordinates associated
2291 code for out of bound conditions on the current node.
2293 We can assume that when control reaches the code generated here,
2294 the index value has already been compared with the parents
2295 of this node, and determined to be on the same side of each parent
2296 as this node is. Thus, if this node tests for the value 51,
2297 and a parent tested for 52, we don't need to consider
2298 the possibility of a value greater than 51. If another parent
2299 tests for the value 50, then this node need not test anything. */
2302 emit_case_nodes (rtx index
, case_node_ptr node
, rtx default_label
,
2305 /* If INDEX has an unsigned type, we must make unsigned branches. */
2306 int unsignedp
= TYPE_UNSIGNED (index_type
);
2307 enum machine_mode mode
= GET_MODE (index
);
2308 enum machine_mode imode
= TYPE_MODE (index_type
);
2310 /* Handle indices detected as constant during RTL expansion. */
2311 if (mode
== VOIDmode
)
2314 /* See if our parents have already tested everything for us.
2315 If they have, emit an unconditional jump for this node. */
2316 if (node_is_bounded (node
, index_type
))
2317 emit_jump (label_rtx (node
->code_label
));
2319 else if (tree_int_cst_equal (node
->low
, node
->high
))
2321 /* Node is single valued. First see if the index expression matches
2322 this node and then check our children, if any. */
2324 do_jump_if_equal (mode
, index
,
2325 convert_modes (mode
, imode
,
2326 expand_normal (node
->low
),
2328 label_rtx (node
->code_label
), unsignedp
);
2330 if (node
->right
!= 0 && node
->left
!= 0)
2332 /* This node has children on both sides.
2333 Dispatch to one side or the other
2334 by comparing the index value with this node's value.
2335 If one subtree is bounded, check that one first,
2336 so we can avoid real branches in the tree. */
2338 if (node_is_bounded (node
->right
, index_type
))
2340 emit_cmp_and_jump_insns (index
,
2343 expand_normal (node
->high
),
2345 GT
, NULL_RTX
, mode
, unsignedp
,
2346 label_rtx (node
->right
->code_label
));
2347 emit_case_nodes (index
, node
->left
, default_label
, index_type
);
2350 else if (node_is_bounded (node
->left
, index_type
))
2352 emit_cmp_and_jump_insns (index
,
2355 expand_normal (node
->high
),
2357 LT
, NULL_RTX
, mode
, unsignedp
,
2358 label_rtx (node
->left
->code_label
));
2359 emit_case_nodes (index
, node
->right
, default_label
, index_type
);
2362 /* If both children are single-valued cases with no
2363 children, finish up all the work. This way, we can save
2364 one ordered comparison. */
2365 else if (tree_int_cst_equal (node
->right
->low
, node
->right
->high
)
2366 && node
->right
->left
== 0
2367 && node
->right
->right
== 0
2368 && tree_int_cst_equal (node
->left
->low
, node
->left
->high
)
2369 && node
->left
->left
== 0
2370 && node
->left
->right
== 0)
2372 /* Neither node is bounded. First distinguish the two sides;
2373 then emit the code for one side at a time. */
2375 /* See if the value matches what the right hand side
2377 do_jump_if_equal (mode
, index
,
2378 convert_modes (mode
, imode
,
2379 expand_normal (node
->right
->low
),
2381 label_rtx (node
->right
->code_label
),
2384 /* See if the value matches what the left hand side
2386 do_jump_if_equal (mode
, index
,
2387 convert_modes (mode
, imode
,
2388 expand_normal (node
->left
->low
),
2390 label_rtx (node
->left
->code_label
),
2396 /* Neither node is bounded. First distinguish the two sides;
2397 then emit the code for one side at a time. */
2400 = build_decl (CURR_INSN_LOCATION
,
2401 LABEL_DECL
, NULL_TREE
, NULL_TREE
);
2403 /* See if the value is on the right. */
2404 emit_cmp_and_jump_insns (index
,
2407 expand_normal (node
->high
),
2409 GT
, NULL_RTX
, mode
, unsignedp
,
2410 label_rtx (test_label
));
2412 /* Value must be on the left.
2413 Handle the left-hand subtree. */
2414 emit_case_nodes (index
, node
->left
, default_label
, index_type
);
2415 /* If left-hand subtree does nothing,
2418 emit_jump (default_label
);
2420 /* Code branches here for the right-hand subtree. */
2421 expand_label (test_label
);
2422 emit_case_nodes (index
, node
->right
, default_label
, index_type
);
2426 else if (node
->right
!= 0 && node
->left
== 0)
2428 /* Here we have a right child but no left so we issue a conditional
2429 branch to default and process the right child.
2431 Omit the conditional branch to default if the right child
2432 does not have any children and is single valued; it would
2433 cost too much space to save so little time. */
2435 if (node
->right
->right
|| node
->right
->left
2436 || !tree_int_cst_equal (node
->right
->low
, node
->right
->high
))
2438 if (!node_has_low_bound (node
, index_type
))
2440 emit_cmp_and_jump_insns (index
,
2443 expand_normal (node
->high
),
2445 LT
, NULL_RTX
, mode
, unsignedp
,
2449 emit_case_nodes (index
, node
->right
, default_label
, index_type
);
2452 /* We cannot process node->right normally
2453 since we haven't ruled out the numbers less than
2454 this node's value. So handle node->right explicitly. */
2455 do_jump_if_equal (mode
, index
,
2458 expand_normal (node
->right
->low
),
2460 label_rtx (node
->right
->code_label
), unsignedp
);
2463 else if (node
->right
== 0 && node
->left
!= 0)
2465 /* Just one subtree, on the left. */
2466 if (node
->left
->left
|| node
->left
->right
2467 || !tree_int_cst_equal (node
->left
->low
, node
->left
->high
))
2469 if (!node_has_high_bound (node
, index_type
))
2471 emit_cmp_and_jump_insns (index
,
2474 expand_normal (node
->high
),
2476 GT
, NULL_RTX
, mode
, unsignedp
,
2480 emit_case_nodes (index
, node
->left
, default_label
, index_type
);
2483 /* We cannot process node->left normally
2484 since we haven't ruled out the numbers less than
2485 this node's value. So handle node->left explicitly. */
2486 do_jump_if_equal (mode
, index
,
2489 expand_normal (node
->left
->low
),
2491 label_rtx (node
->left
->code_label
), unsignedp
);
2496 /* Node is a range. These cases are very similar to those for a single
2497 value, except that we do not start by testing whether this node
2498 is the one to branch to. */
2500 if (node
->right
!= 0 && node
->left
!= 0)
2502 /* Node has subtrees on both sides.
2503 If the right-hand subtree is bounded,
2504 test for it first, since we can go straight there.
2505 Otherwise, we need to make a branch in the control structure,
2506 then handle the two subtrees. */
2507 tree test_label
= 0;
2509 if (node_is_bounded (node
->right
, index_type
))
2510 /* Right hand node is fully bounded so we can eliminate any
2511 testing and branch directly to the target code. */
2512 emit_cmp_and_jump_insns (index
,
2515 expand_normal (node
->high
),
2517 GT
, NULL_RTX
, mode
, unsignedp
,
2518 label_rtx (node
->right
->code_label
));
2521 /* Right hand node requires testing.
2522 Branch to a label where we will handle it later. */
2524 test_label
= build_decl (CURR_INSN_LOCATION
,
2525 LABEL_DECL
, NULL_TREE
, NULL_TREE
);
2526 emit_cmp_and_jump_insns (index
,
2529 expand_normal (node
->high
),
2531 GT
, NULL_RTX
, mode
, unsignedp
,
2532 label_rtx (test_label
));
2535 /* Value belongs to this node or to the left-hand subtree. */
2537 emit_cmp_and_jump_insns (index
,
2540 expand_normal (node
->low
),
2542 GE
, NULL_RTX
, mode
, unsignedp
,
2543 label_rtx (node
->code_label
));
2545 /* Handle the left-hand subtree. */
2546 emit_case_nodes (index
, node
->left
, default_label
, index_type
);
2548 /* If right node had to be handled later, do that now. */
2552 /* If the left-hand subtree fell through,
2553 don't let it fall into the right-hand subtree. */
2555 emit_jump (default_label
);
2557 expand_label (test_label
);
2558 emit_case_nodes (index
, node
->right
, default_label
, index_type
);
2562 else if (node
->right
!= 0 && node
->left
== 0)
2564 /* Deal with values to the left of this node,
2565 if they are possible. */
2566 if (!node_has_low_bound (node
, index_type
))
2568 emit_cmp_and_jump_insns (index
,
2571 expand_normal (node
->low
),
2573 LT
, NULL_RTX
, mode
, unsignedp
,
2577 /* Value belongs to this node or to the right-hand subtree. */
2579 emit_cmp_and_jump_insns (index
,
2582 expand_normal (node
->high
),
2584 LE
, NULL_RTX
, mode
, unsignedp
,
2585 label_rtx (node
->code_label
));
2587 emit_case_nodes (index
, node
->right
, default_label
, index_type
);
2590 else if (node
->right
== 0 && node
->left
!= 0)
2592 /* Deal with values to the right of this node,
2593 if they are possible. */
2594 if (!node_has_high_bound (node
, index_type
))
2596 emit_cmp_and_jump_insns (index
,
2599 expand_normal (node
->high
),
2601 GT
, NULL_RTX
, mode
, unsignedp
,
2605 /* Value belongs to this node or to the left-hand subtree. */
2607 emit_cmp_and_jump_insns (index
,
2610 expand_normal (node
->low
),
2612 GE
, NULL_RTX
, mode
, unsignedp
,
2613 label_rtx (node
->code_label
));
2615 emit_case_nodes (index
, node
->left
, default_label
, index_type
);
2620 /* Node has no children so we check low and high bounds to remove
2621 redundant tests. Only one of the bounds can exist,
2622 since otherwise this node is bounded--a case tested already. */
2623 int high_bound
= node_has_high_bound (node
, index_type
);
2624 int low_bound
= node_has_low_bound (node
, index_type
);
2626 if (!high_bound
&& low_bound
)
2628 emit_cmp_and_jump_insns (index
,
2631 expand_normal (node
->high
),
2633 GT
, NULL_RTX
, mode
, unsignedp
,
2637 else if (!low_bound
&& high_bound
)
2639 emit_cmp_and_jump_insns (index
,
2642 expand_normal (node
->low
),
2644 LT
, NULL_RTX
, mode
, unsignedp
,
2647 else if (!low_bound
&& !high_bound
)
2649 /* Widen LOW and HIGH to the same width as INDEX. */
2650 tree type
= lang_hooks
.types
.type_for_mode (mode
, unsignedp
);
2651 tree low
= build1 (CONVERT_EXPR
, type
, node
->low
);
2652 tree high
= build1 (CONVERT_EXPR
, type
, node
->high
);
2653 rtx low_rtx
, new_index
, new_bound
;
2655 /* Instead of doing two branches, emit one unsigned branch for
2656 (index-low) > (high-low). */
2657 low_rtx
= expand_expr (low
, NULL_RTX
, mode
, EXPAND_NORMAL
);
2658 new_index
= expand_simple_binop (mode
, MINUS
, index
, low_rtx
,
2659 NULL_RTX
, unsignedp
,
2661 new_bound
= expand_expr (fold_build2 (MINUS_EXPR
, type
,
2663 NULL_RTX
, mode
, EXPAND_NORMAL
);
2665 emit_cmp_and_jump_insns (new_index
, new_bound
, GT
, NULL_RTX
,
2666 mode
, 1, default_label
);
2669 emit_jump (label_rtx (node
->code_label
));