/* Expands front end tree to back end RTL for GCC
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   The functions whose names start with `expand_' are called by the
   expander to generate RTL instructions for various kinds of constructs.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "alloc-pool.h"
#include "pretty-print.h"
#include "pointer-set.h"
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   We start with a vector of case nodes sorted in ascending order, and
   the default label as the last element in the vector.  Before expanding
   to RTL, we transform this vector into a list linked via the RIGHT
   fields in the case_node struct.  Nodes with higher case values are
   later in the list.

   Switch statements can be output in three forms.  A branch table is
   used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.

   For very small, suitable switch statements, we can generate a series
   of simple bit test and branches instead.  */
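/* An illustrative (hypothetical) example, added for exposition: a dense
   switch such as

     switch (x) { case 0: ...; case 1: ...; case 2: ...; case 3: ...; }

   is a natural candidate for a branch table, whereas a sparse one such as

     switch (x) { case 1: ...; case 1000: ...; case 100000: ...; }

   would normally be expanded as a balanced tree of compare-and-jump insns.  */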
struct case_node
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;                 /* Lowest index value for this label */
  tree high;                /* Highest index value for this label */
  tree code_label;          /* Label to jump to when node matches */
  int prob;                 /* Probability of taking this case.  */
  int subtree_prob;         /* Probability of reaching subtree rooted at this node */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
extern basic_block label_to_block_fn (struct function *, tree);

static int n_occurrences (int, const char *);
static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree, tree);
static void expand_null_return_1 (void);
static void expand_value_return (rtx);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_case_nodes (rtx, case_node_ptr, rtx, int, tree);
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (tree label)
{
  gcc_assert (TREE_CODE (label) == LABEL_DECL);

  if (!DECL_RTL_SET_P (label))
    {
      rtx r = gen_label_rtx ();
      SET_DECL_RTL (label, r);
      if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
        LABEL_PRESERVE_P (r) = 1;
    }

  return DECL_RTL (label);
}
/* As above, but also put it on the forced-reference list of the
   function that contains it.  */

rtx
force_label_rtx (tree label)
{
  rtx ref = label_rtx (label);
  tree function = decl_function_context (label);

  gcc_assert (function);

  forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref, forced_labels);
  return ref;
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (tree exp)
{
  rtx x = expand_normal (exp);

  x = convert_memory_address (Pmode, x);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (tree label)
{
  rtx label_r = label_rtx (label);

  do_pending_stack_adjust ();
  emit_label (label_r);
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (DECL_NONLOCAL (label))
    {
      expand_builtin_setjmp_receiver (NULL);
      nonlocal_goto_handler_labels
        = gen_rtx_EXPR_LIST (VOIDmode, label_r,
                             nonlocal_goto_handler_labels);
    }

  if (FORCED_LABEL (label))
    forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);

  if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
    maybe_set_first_label_num (label_r);
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
#ifdef ENABLE_CHECKING
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  gcc_assert (!context || context == current_function_decl);
#endif

  emit_jump (label_rtx (label));
}

/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

static void
expand_asm_loc (tree string, int vol, location_t locus)
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT_loc (VOIDmode,
                                ggc_strdup (TREE_STRING_POINTER (string)),
                                locus);

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);
}
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */
276 parse_output_constraint (const char **constraint_p
, int operand_num
,
277 int ninputs
, int noutputs
, bool *allows_mem
,
278 bool *allows_reg
, bool *is_inout
)
280 const char *constraint
= *constraint_p
;
283 /* Assume the constraint doesn't allow the use of either a register
288 /* Allow the `=' or `+' to not be at the beginning of the string,
289 since it wasn't explicitly documented that way, and there is a
290 large body of code that puts it last. Swap the character to
291 the front, so as not to uglify any place else. */
292 p
= strchr (constraint
, '=');
294 p
= strchr (constraint
, '+');
296 /* If the string doesn't contain an `=', issue an error
300 error ("output operand constraint lacks %<=%>");
304 /* If the constraint begins with `+', then the operand is both read
305 from and written to. */
306 *is_inout
= (*p
== '+');
308 /* Canonicalize the output constraint so that it begins with `='. */
309 if (p
!= constraint
|| *is_inout
)
312 size_t c_len
= strlen (constraint
);
315 warning (0, "output constraint %qc for operand %d "
316 "is not at the beginning",
319 /* Make a copy of the constraint. */
320 buf
= XALLOCAVEC (char, c_len
+ 1);
321 strcpy (buf
, constraint
);
322 /* Swap the first character and the `=' or `+'. */
323 buf
[p
- constraint
] = buf
[0];
324 /* Make sure the first character is an `='. (Until we do this,
325 it might be a `+'.) */
327 /* Replace the constraint with the canonicalized string. */
328 *constraint_p
= ggc_alloc_string (buf
, c_len
);
329 constraint
= *constraint_p
;
332 /* Loop through the constraint string. */
333 for (p
= constraint
+ 1; *p
; p
+= CONSTRAINT_LEN (*p
, p
))
338 error ("operand constraint contains incorrectly positioned "
343 if (operand_num
+ 1 == ninputs
+ noutputs
)
345 error ("%<%%%> constraint used with last operand");
350 case 'V': case TARGET_MEM_CONSTRAINT
: case 'o':
354 case '?': case '!': case '*': case '&': case '#':
355 case 'E': case 'F': case 'G': case 'H':
356 case 's': case 'i': case 'n':
357 case 'I': case 'J': case 'K': case 'L': case 'M':
358 case 'N': case 'O': case 'P': case ',':
361 case '0': case '1': case '2': case '3': case '4':
362 case '5': case '6': case '7': case '8': case '9':
364 error ("matching constraint not valid in output operand");
368 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
369 excepting those that expand_call created. So match memory
386 if (REG_CLASS_FROM_CONSTRAINT (*p
, p
) != NO_REGS
)
388 #ifdef EXTRA_CONSTRAINT_STR
389 else if (EXTRA_ADDRESS_CONSTRAINT (*p
, p
))
391 else if (EXTRA_MEMORY_CONSTRAINT (*p
, p
))
395 /* Otherwise we can't assume anything about the nature of
396 the constraint except that it isn't purely registers.
397 Treat it like "g" and hope for the best. */
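/* For illustration (hypothetical examples, not taken from any particular
   front end): the output constraint "=r" allows a register and is already
   canonical; "+m" marks a read-write memory operand, so *IS_INOUT is set
   and the constraint is rewritten to start with '='; "a=" has the '=' in
   the wrong position and is canonicalized (with a warning) to "=a".  */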
/* Similar, but for input constraints.  */
411 parse_input_constraint (const char **constraint_p
, int input_num
,
412 int ninputs
, int noutputs
, int ninout
,
413 const char * const * constraints
,
414 bool *allows_mem
, bool *allows_reg
)
416 const char *constraint
= *constraint_p
;
417 const char *orig_constraint
= constraint
;
418 size_t c_len
= strlen (constraint
);
420 bool saw_match
= false;
422 /* Assume the constraint doesn't allow the use of either
423 a register or memory. */
427 /* Make sure constraint has neither `=', `+', nor '&'. */
429 for (j
= 0; j
< c_len
; j
+= CONSTRAINT_LEN (constraint
[j
], constraint
+j
))
430 switch (constraint
[j
])
432 case '+': case '=': case '&':
433 if (constraint
== orig_constraint
)
435 error ("input operand constraint contains %qc", constraint
[j
]);
441 if (constraint
== orig_constraint
442 && input_num
+ 1 == ninputs
- ninout
)
444 error ("%<%%%> constraint used with last operand");
449 case 'V': case TARGET_MEM_CONSTRAINT
: case 'o':
454 case '?': case '!': case '*': case '#':
455 case 'E': case 'F': case 'G': case 'H':
456 case 's': case 'i': case 'n':
457 case 'I': case 'J': case 'K': case 'L': case 'M':
458 case 'N': case 'O': case 'P': case ',':
461 /* Whether or not a numeric constraint allows a register is
462 decided by the matching constraint, and so there is no need
463 to do anything special with them. We must handle them in
464 the default case, so that we don't unnecessarily force
465 operands to memory. */
466 case '0': case '1': case '2': case '3': case '4':
467 case '5': case '6': case '7': case '8': case '9':
474 match
= strtoul (constraint
+ j
, &end
, 10);
475 if (match
>= (unsigned long) noutputs
)
477 error ("matching constraint references invalid operand number");
481 /* Try and find the real constraint for this dup. Only do this
482 if the matching constraint is the only alternative. */
484 && (j
== 0 || (j
== 1 && constraint
[0] == '%')))
486 constraint
= constraints
[match
];
487 *constraint_p
= constraint
;
488 c_len
= strlen (constraint
);
490 /* ??? At the end of the loop, we will skip the first part of
491 the matched constraint. This assumes not only that the
492 other constraint is an output constraint, but also that
493 the '=' or '+' come first. */
497 j
= end
- constraint
;
498 /* Anticipate increment at end of loop. */
513 if (! ISALPHA (constraint
[j
]))
515 error ("invalid punctuation %qc in constraint", constraint
[j
]);
518 if (REG_CLASS_FROM_CONSTRAINT (constraint
[j
], constraint
+ j
)
521 #ifdef EXTRA_CONSTRAINT_STR
522 else if (EXTRA_ADDRESS_CONSTRAINT (constraint
[j
], constraint
+ j
))
524 else if (EXTRA_MEMORY_CONSTRAINT (constraint
[j
], constraint
+ j
))
528 /* Otherwise we can't assume anything about the nature of
529 the constraint except that it isn't purely registers.
530 Treat it like "g" and hope for the best. */
538 if (saw_match
&& !*allows_reg
)
539 warning (0, "matching constraint does not allow a register");
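/* For illustration (a hypothetical example): in

     asm ("..." : "=r" (x) : "0" (y));

   the input constraint "0" matches output operand 0, so the input is given
   the same location as that output; the code above follows the reference
   to the output constraint "=r" to decide that a register is allowed.  */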
/* Return DECL iff there's an overlap between *REGS and DECL, where DECL
   can be an asm-declared register.  Called via walk_tree.  */
548 decl_overlaps_hard_reg_set_p (tree
*declp
, int *walk_subtrees ATTRIBUTE_UNUSED
,
552 const HARD_REG_SET
*const regs
= (const HARD_REG_SET
*) data
;
554 if (TREE_CODE (decl
) == VAR_DECL
)
556 if (DECL_HARD_REGISTER (decl
)
557 && REG_P (DECL_RTL (decl
))
558 && REGNO (DECL_RTL (decl
)) < FIRST_PSEUDO_REGISTER
)
560 rtx reg
= DECL_RTL (decl
);
562 if (overlaps_hard_reg_set_p (*regs
, GET_MODE (reg
), REGNO (reg
)))
567 else if (TYPE_P (decl
) || TREE_CODE (decl
) == PARM_DECL
)
572 /* If there is an overlap between *REGS and DECL, return the first overlap
575 tree_overlaps_hard_reg_set (tree decl
, HARD_REG_SET
*regs
)
577 return walk_tree (&decl
, decl_overlaps_hard_reg_set_p
, regs
, NULL
);
580 /* Check for overlap between registers marked in CLOBBERED_REGS and
581 anything inappropriate in T. Emit error and return the register
582 variable definition for error, NULL_TREE for ok. */
585 tree_conflicts_with_clobbers_p (tree t
, HARD_REG_SET
*clobbered_regs
)
587 /* Conflicts between asm-declared register variables and the clobber
588 list are not allowed. */
589 tree overlap
= tree_overlaps_hard_reg_set (t
, clobbered_regs
);
593 error ("asm-specifier for variable %qE conflicts with asm clobber list",
594 DECL_NAME (overlap
));
596 /* Reset registerness to stop multiple errors emitted for a single
598 DECL_REGISTER (overlap
) = 0;
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_PURPOSE (or NULL_TREE) and a constraint string
   in TREE_VALUE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
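/* As an illustration (hypothetical source-level example), a statement like

     asm volatile ("mov %1, %0" : "=r" (out) : "r" (in) : "cc");

   reaches this function with one output, one input, and one clobber; the
   constraint strings are collected into CONSTRAINTS, the operands are
   evaluated, and the clobbered registers are checked against the operands
   below.  */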
623 expand_asm_operands (tree string
, tree outputs
, tree inputs
,
624 tree clobbers
, tree labels
, int vol
, location_t locus
)
626 rtvec argvec
, constraintvec
, labelvec
;
628 int ninputs
= list_length (inputs
);
629 int noutputs
= list_length (outputs
);
630 int nlabels
= list_length (labels
);
633 HARD_REG_SET clobbered_regs
;
634 int clobber_conflict_found
= 0;
638 /* Vector of RTX's of evaluated output operands. */
639 rtx
*output_rtx
= XALLOCAVEC (rtx
, noutputs
);
640 int *inout_opnum
= XALLOCAVEC (int, noutputs
);
641 rtx
*real_output_rtx
= XALLOCAVEC (rtx
, noutputs
);
642 enum machine_mode
*inout_mode
= XALLOCAVEC (enum machine_mode
, noutputs
);
643 const char **constraints
= XALLOCAVEC (const char *, noutputs
+ ninputs
);
644 int old_generating_concat_p
= generating_concat_p
;
646 /* An ASM with no outputs needs to be treated as volatile, for now. */
650 if (! check_operand_nalternatives (outputs
, inputs
))
653 string
= resolve_asm_operand_names (string
, outputs
, inputs
, labels
);
655 /* Collect constraints. */
657 for (t
= outputs
; t
; t
= TREE_CHAIN (t
), i
++)
658 constraints
[i
] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
659 for (t
= inputs
; t
; t
= TREE_CHAIN (t
), i
++)
660 constraints
[i
] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
662 /* Sometimes we wish to automatically clobber registers across an asm.
663 Case in point is when the i386 backend moved from cc0 to a hard reg --
664 maintaining source-level compatibility means automatically clobbering
665 the flags register. */
666 clobbers
= targetm
.md_asm_clobbers (outputs
, inputs
, clobbers
);
668 /* Count the number of meaningful clobbered registers, ignoring what
669 we would ignore later. */
671 CLEAR_HARD_REG_SET (clobbered_regs
);
672 for (tail
= clobbers
; tail
; tail
= TREE_CHAIN (tail
))
677 if (TREE_VALUE (tail
) == error_mark_node
)
679 regname
= TREE_STRING_POINTER (TREE_VALUE (tail
));
681 i
= decode_reg_name_and_count (regname
, &nregs
);
685 error ("unknown register name %qs in %<asm%>", regname
);
687 /* Mark clobbered registers. */
692 for (reg
= i
; reg
< i
+ nregs
; reg
++)
696 /* Clobbering the PIC register is an error. */
697 if (reg
== (int) PIC_OFFSET_TABLE_REGNUM
)
699 error ("PIC register clobbered by %qs in %<asm%>", regname
);
703 SET_HARD_REG_BIT (clobbered_regs
, reg
);
708 /* First pass over inputs and outputs checks validity and sets
709 mark_addressable if needed. */
712 for (i
= 0, tail
= outputs
; tail
; tail
= TREE_CHAIN (tail
), i
++)
714 tree val
= TREE_VALUE (tail
);
715 tree type
= TREE_TYPE (val
);
716 const char *constraint
;
721 /* If there's an erroneous arg, emit no insn. */
722 if (type
== error_mark_node
)
725 /* Try to parse the output constraint. If that fails, there's
726 no point in going further. */
727 constraint
= constraints
[i
];
728 if (!parse_output_constraint (&constraint
, i
, ninputs
, noutputs
,
729 &allows_mem
, &allows_reg
, &is_inout
))
736 && REG_P (DECL_RTL (val
))
737 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
))))
738 mark_addressable (val
);
745 if (ninputs
+ noutputs
> MAX_RECOG_OPERANDS
)
747 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS
);
751 for (i
= 0, tail
= inputs
; tail
; i
++, tail
= TREE_CHAIN (tail
))
753 bool allows_reg
, allows_mem
;
754 const char *constraint
;
756 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
757 would get VOIDmode and that could cause a crash in reload. */
758 if (TREE_TYPE (TREE_VALUE (tail
)) == error_mark_node
)
761 constraint
= constraints
[i
+ noutputs
];
762 if (! parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, ninout
,
763 constraints
, &allows_mem
, &allows_reg
))
766 if (! allows_reg
&& allows_mem
)
767 mark_addressable (TREE_VALUE (tail
));
770 /* Second pass evaluates arguments. */
772 /* Make sure stack is consistent for asm goto. */
774 do_pending_stack_adjust ();
777 for (i
= 0, tail
= outputs
; tail
; tail
= TREE_CHAIN (tail
), i
++)
779 tree val
= TREE_VALUE (tail
);
780 tree type
= TREE_TYPE (val
);
787 ok
= parse_output_constraint (&constraints
[i
], i
, ninputs
,
788 noutputs
, &allows_mem
, &allows_reg
,
792 /* If an output operand is not a decl or indirect ref and our constraint
793 allows a register, make a temporary to act as an intermediate.
794 Make the asm insn write into that, then our caller will copy it to
795 the real output operand. Likewise for promoted variables. */
797 generating_concat_p
= 0;
799 real_output_rtx
[i
] = NULL_RTX
;
800 if ((TREE_CODE (val
) == INDIRECT_REF
803 && (allows_mem
|| REG_P (DECL_RTL (val
)))
804 && ! (REG_P (DECL_RTL (val
))
805 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
)))
809 op
= expand_expr (val
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
811 op
= validize_mem (op
);
813 if (! allows_reg
&& !MEM_P (op
))
814 error ("output number %d not directly addressable", i
);
815 if ((! allows_mem
&& MEM_P (op
))
816 || GET_CODE (op
) == CONCAT
)
818 real_output_rtx
[i
] = op
;
819 op
= gen_reg_rtx (GET_MODE (op
));
821 emit_move_insn (op
, real_output_rtx
[i
]);
826 op
= assign_temp (type
, 0, 1);
827 op
= validize_mem (op
);
828 if (!MEM_P (op
) && TREE_CODE (TREE_VALUE (tail
)) == SSA_NAME
)
829 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail
)), op
);
830 TREE_VALUE (tail
) = make_tree (type
, op
);
834 generating_concat_p
= old_generating_concat_p
;
838 inout_mode
[ninout
] = TYPE_MODE (type
);
839 inout_opnum
[ninout
++] = i
;
842 if (tree_conflicts_with_clobbers_p (val
, &clobbered_regs
))
843 clobber_conflict_found
= 1;
846 /* Make vectors for the expression-rtx, constraint strings,
847 and named operands. */
849 argvec
= rtvec_alloc (ninputs
);
850 constraintvec
= rtvec_alloc (ninputs
);
851 labelvec
= rtvec_alloc (nlabels
);
853 body
= gen_rtx_ASM_OPERANDS ((noutputs
== 0 ? VOIDmode
854 : GET_MODE (output_rtx
[0])),
855 ggc_strdup (TREE_STRING_POINTER (string
)),
856 empty_string
, 0, argvec
, constraintvec
,
859 MEM_VOLATILE_P (body
) = vol
;
861 /* Eval the inputs and put them into ARGVEC.
862 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
864 for (i
= 0, tail
= inputs
; tail
; tail
= TREE_CHAIN (tail
), ++i
)
866 bool allows_reg
, allows_mem
;
867 const char *constraint
;
872 constraint
= constraints
[i
+ noutputs
];
873 ok
= parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, ninout
,
874 constraints
, &allows_mem
, &allows_reg
);
877 generating_concat_p
= 0;
879 val
= TREE_VALUE (tail
);
880 type
= TREE_TYPE (val
);
881 /* EXPAND_INITIALIZER will not generate code for valid initializer
882 constants, but will still generate code for other types of operand.
883 This is the behavior we want for constant constraints. */
884 op
= expand_expr (val
, NULL_RTX
, VOIDmode
,
885 allows_reg
? EXPAND_NORMAL
886 : allows_mem
? EXPAND_MEMORY
887 : EXPAND_INITIALIZER
);
889 /* Never pass a CONCAT to an ASM. */
890 if (GET_CODE (op
) == CONCAT
)
891 op
= force_reg (GET_MODE (op
), op
);
893 op
= validize_mem (op
);
895 if (asm_operand_ok (op
, constraint
, NULL
) <= 0)
897 if (allows_reg
&& TYPE_MODE (type
) != BLKmode
)
898 op
= force_reg (TYPE_MODE (type
), op
);
899 else if (!allows_mem
)
900 warning (0, "asm operand %d probably doesn%'t match constraints",
904 /* We won't recognize either volatile memory or memory
905 with a queued address as available a memory_operand
906 at this point. Ignore it: clearly this *is* a memory. */
912 generating_concat_p
= old_generating_concat_p
;
913 ASM_OPERANDS_INPUT (body
, i
) = op
;
915 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body
, i
)
916 = gen_rtx_ASM_INPUT (TYPE_MODE (type
),
917 ggc_strdup (constraints
[i
+ noutputs
]));
919 if (tree_conflicts_with_clobbers_p (val
, &clobbered_regs
))
920 clobber_conflict_found
= 1;
923 /* Protect all the operands from the queue now that they have all been
926 generating_concat_p
= 0;
928 /* For in-out operands, copy output rtx to input rtx. */
929 for (i
= 0; i
< ninout
; i
++)
931 int j
= inout_opnum
[i
];
934 ASM_OPERANDS_INPUT (body
, ninputs
- ninout
+ i
)
937 sprintf (buffer
, "%d", j
);
938 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body
, ninputs
- ninout
+ i
)
939 = gen_rtx_ASM_INPUT (inout_mode
[i
], ggc_strdup (buffer
));
942 /* Copy labels to the vector. */
943 for (i
= 0, tail
= labels
; i
< nlabels
; ++i
, tail
= TREE_CHAIN (tail
))
944 ASM_OPERANDS_LABEL (body
, i
)
945 = gen_rtx_LABEL_REF (Pmode
, label_rtx (TREE_VALUE (tail
)));
947 generating_concat_p
= old_generating_concat_p
;
949 /* Now, for each output, construct an rtx
950 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
951 ARGVEC CONSTRAINTS OPNAMES))
952 If there is more than one, put them inside a PARALLEL. */
954 if (nlabels
> 0 && nclobbers
== 0)
956 gcc_assert (noutputs
== 0);
957 emit_jump_insn (body
);
959 else if (noutputs
== 0 && nclobbers
== 0)
961 /* No output operands: put in a raw ASM_OPERANDS rtx. */
964 else if (noutputs
== 1 && nclobbers
== 0)
966 ASM_OPERANDS_OUTPUT_CONSTRAINT (body
) = ggc_strdup (constraints
[0]);
967 emit_insn (gen_rtx_SET (VOIDmode
, output_rtx
[0], body
));
977 body
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (num
+ nclobbers
));
979 /* For each output operand, store a SET. */
980 for (i
= 0, tail
= outputs
; tail
; tail
= TREE_CHAIN (tail
), i
++)
983 = gen_rtx_SET (VOIDmode
,
986 (GET_MODE (output_rtx
[i
]),
987 ggc_strdup (TREE_STRING_POINTER (string
)),
988 ggc_strdup (constraints
[i
]),
989 i
, argvec
, constraintvec
, labelvec
, locus
));
991 MEM_VOLATILE_P (SET_SRC (XVECEXP (body
, 0, i
))) = vol
;
994 /* If there are no outputs (but there are some clobbers)
995 store the bare ASM_OPERANDS into the PARALLEL. */
998 XVECEXP (body
, 0, i
++) = obody
;
1000 /* Store (clobber REG) for each clobbered register specified. */
1002 for (tail
= clobbers
; tail
; tail
= TREE_CHAIN (tail
))
1004 const char *regname
= TREE_STRING_POINTER (TREE_VALUE (tail
));
1006 int j
= decode_reg_name_and_count (regname
, &nregs
);
1011 if (j
== -3) /* `cc', which is not a register */
1014 if (j
== -4) /* `memory', don't cache memory across asm */
1016 XVECEXP (body
, 0, i
++)
1017 = gen_rtx_CLOBBER (VOIDmode
,
1020 gen_rtx_SCRATCH (VOIDmode
)));
1024 /* Ignore unknown register, error already signaled. */
1028 for (reg
= j
; reg
< j
+ nregs
; reg
++)
1030 /* Use QImode since that's guaranteed to clobber just
1032 clobbered_reg
= gen_rtx_REG (QImode
, reg
);
1034 /* Do sanity check for overlap between clobbers and
1035 respectively input and outputs that hasn't been
1036 handled. Such overlap should have been detected and
1038 if (!clobber_conflict_found
)
1042 /* We test the old body (obody) contents to avoid
1043 tripping over the under-construction body. */
1044 for (opno
= 0; opno
< noutputs
; opno
++)
1045 if (reg_overlap_mentioned_p (clobbered_reg
,
1048 ("asm clobber conflict with output operand");
1050 for (opno
= 0; opno
< ninputs
- ninout
; opno
++)
1051 if (reg_overlap_mentioned_p (clobbered_reg
,
1052 ASM_OPERANDS_INPUT (obody
,
1055 ("asm clobber conflict with input operand");
1058 XVECEXP (body
, 0, i
++)
1059 = gen_rtx_CLOBBER (VOIDmode
, clobbered_reg
);
1064 emit_jump_insn (body
);
1069 /* For any outputs that needed reloading into registers, spill them
1070 back to where they belong. */
1071 for (i
= 0; i
< noutputs
; ++i
)
1072 if (real_output_rtx
[i
])
1073 emit_move_insn (real_output_rtx
[i
], output_rtx
[i
]);
1075 crtl
->has_asm_statement
= 1;
1080 expand_asm_stmt (gimple stmt
)
1083 tree outputs
, tail
, t
;
1087 tree str
, out
, in
, cl
, labels
;
1088 location_t locus
= gimple_location (stmt
);
1090 /* Meh... convert the gimple asm operands into real tree lists.
1091 Eventually we should make all routines work on the vectors instead
1092 of relying on TREE_CHAIN. */
1094 n
= gimple_asm_noutputs (stmt
);
1097 t
= out
= gimple_asm_output_op (stmt
, 0);
1098 for (i
= 1; i
< n
; i
++)
1099 t
= TREE_CHAIN (t
) = gimple_asm_output_op (stmt
, i
);
1103 n
= gimple_asm_ninputs (stmt
);
1106 t
= in
= gimple_asm_input_op (stmt
, 0);
1107 for (i
= 1; i
< n
; i
++)
1108 t
= TREE_CHAIN (t
) = gimple_asm_input_op (stmt
, i
);
1112 n
= gimple_asm_nclobbers (stmt
);
1115 t
= cl
= gimple_asm_clobber_op (stmt
, 0);
1116 for (i
= 1; i
< n
; i
++)
1117 t
= TREE_CHAIN (t
) = gimple_asm_clobber_op (stmt
, i
);
1121 n
= gimple_asm_nlabels (stmt
);
1124 t
= labels
= gimple_asm_label_op (stmt
, 0);
1125 for (i
= 1; i
< n
; i
++)
1126 t
= TREE_CHAIN (t
) = gimple_asm_label_op (stmt
, i
);
1129 s
= gimple_asm_string (stmt
);
1130 str
= build_string (strlen (s
), s
);
1132 if (gimple_asm_input_p (stmt
))
1134 expand_asm_loc (str
, gimple_asm_volatile_p (stmt
), locus
);
1139 noutputs
= gimple_asm_noutputs (stmt
);
1140 /* o[I] is the place that output number I should be written. */
1141 o
= (tree
*) alloca (noutputs
* sizeof (tree
));
1143 /* Record the contents of OUTPUTS before it is modified. */
1144 for (i
= 0, tail
= outputs
; tail
; tail
= TREE_CHAIN (tail
), i
++)
1145 o
[i
] = TREE_VALUE (tail
);
1147 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1148 OUTPUTS some trees for where the values were actually stored. */
1149 expand_asm_operands (str
, outputs
, in
, cl
, labels
,
1150 gimple_asm_volatile_p (stmt
), locus
);
1152 /* Copy all the intermediate outputs into the specified outputs. */
1153 for (i
= 0, tail
= outputs
; tail
; tail
= TREE_CHAIN (tail
), i
++)
1155 if (o
[i
] != TREE_VALUE (tail
))
1157 expand_assignment (o
[i
], TREE_VALUE (tail
), false);
1160 /* Restore the original value so that it's correct the next
1161 time we expand this function. */
1162 TREE_VALUE (tail
) = o
[i
];
/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */
1171 check_operand_nalternatives (tree outputs
, tree inputs
)
1173 if (outputs
|| inputs
)
1175 tree tmp
= TREE_PURPOSE (outputs
? outputs
: inputs
);
1177 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp
)));
1180 if (nalternatives
+ 1 > MAX_RECOG_ALTERNATIVES
)
1182 error ("too many alternatives in %<asm%>");
1189 const char *constraint
1190 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp
)));
1192 if (n_occurrences (',', constraint
) != nalternatives
)
1194 error ("operand constraints for %<asm%> differ "
1195 "in number of alternatives");
1199 if (TREE_CHAIN (tmp
))
1200 tmp
= TREE_CHAIN (tmp
);
1202 tmp
= next
, next
= 0;
/* A subroutine of expand_asm_operands.  Check that all operand names
   are unique.  Return true if so.  We rely on the fact that these names
   are identifiers, and so have been canonicalized by get_identifier,
   so all we need are pointer comparisons.  */
1215 check_unique_operand_names (tree outputs
, tree inputs
, tree labels
)
1217 tree i
, j
, i_name
= NULL_TREE
;
1219 for (i
= outputs
; i
; i
= TREE_CHAIN (i
))
1221 i_name
= TREE_PURPOSE (TREE_PURPOSE (i
));
1225 for (j
= TREE_CHAIN (i
); j
; j
= TREE_CHAIN (j
))
1226 if (simple_cst_equal (i_name
, TREE_PURPOSE (TREE_PURPOSE (j
))))
1230 for (i
= inputs
; i
; i
= TREE_CHAIN (i
))
1232 i_name
= TREE_PURPOSE (TREE_PURPOSE (i
));
1236 for (j
= TREE_CHAIN (i
); j
; j
= TREE_CHAIN (j
))
1237 if (simple_cst_equal (i_name
, TREE_PURPOSE (TREE_PURPOSE (j
))))
1239 for (j
= outputs
; j
; j
= TREE_CHAIN (j
))
1240 if (simple_cst_equal (i_name
, TREE_PURPOSE (TREE_PURPOSE (j
))))
1244 for (i
= labels
; i
; i
= TREE_CHAIN (i
))
1246 i_name
= TREE_PURPOSE (i
);
1250 for (j
= TREE_CHAIN (i
); j
; j
= TREE_CHAIN (j
))
1251 if (simple_cst_equal (i_name
, TREE_PURPOSE (j
)))
1253 for (j
= inputs
; j
; j
= TREE_CHAIN (j
))
1254 if (simple_cst_equal (i_name
, TREE_PURPOSE (TREE_PURPOSE (j
))))
1261 error ("duplicate asm operand name %qs", TREE_STRING_POINTER (i_name
));
/* A subroutine of expand_asm_operands.  Resolve the names of the operands
   in OUTPUTS and INPUTS to numbers, and replace the name expansions in
   STRING and in the constraints to those numbers.  */
1270 resolve_asm_operand_names (tree string
, tree outputs
, tree inputs
, tree labels
)
1277 check_unique_operand_names (outputs
, inputs
, labels
);
1279 /* Substitute [<name>] in input constraint strings. There should be no
1280 named operands in output constraints. */
1281 for (t
= inputs
; t
; t
= TREE_CHAIN (t
))
1283 c
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
1284 if (strchr (c
, '[') != NULL
)
1286 p
= buffer
= xstrdup (c
);
1287 while ((p
= strchr (p
, '[')) != NULL
)
1288 p
= resolve_operand_name_1 (p
, outputs
, inputs
, NULL
);
1289 TREE_VALUE (TREE_PURPOSE (t
))
1290 = build_string (strlen (buffer
), buffer
);
1295 /* Now check for any needed substitutions in the template. */
1296 c
= TREE_STRING_POINTER (string
);
1297 while ((c
= strchr (c
, '%')) != NULL
)
1301 else if (ISALPHA (c
[1]) && c
[2] == '[')
1305 c
+= 1 + (c
[1] == '%');
1312 /* OK, we need to make a copy so we can perform the substitutions.
1313 Assume that we will not need extra space--we get to remove '['
1314 and ']', which means we cannot have a problem until we have more
1315 than 999 operands. */
1316 buffer
= xstrdup (TREE_STRING_POINTER (string
));
1317 p
= buffer
+ (c
- TREE_STRING_POINTER (string
));
1319 while ((p
= strchr (p
, '%')) != NULL
)
1323 else if (ISALPHA (p
[1]) && p
[2] == '[')
1327 p
+= 1 + (p
[1] == '%');
1331 p
= resolve_operand_name_1 (p
, outputs
, inputs
, labels
);
1334 string
= build_string (strlen (buffer
), buffer
);
/* A subroutine of resolve_operand_names.  P points to the '[' for a
   potential named operand of the form [<name>].  In place, replace
   the name and brackets with a number.  Return a pointer to the
   balance of the string after substitution.  */
1347 resolve_operand_name_1 (char *p
, tree outputs
, tree inputs
, tree labels
)
1353 /* Collect the operand name. */
1354 q
= strchr (++p
, ']');
1357 error ("missing close brace for named operand");
1358 return strchr (p
, '\0');
1362 /* Resolve the name to a number. */
1363 for (op
= 0, t
= outputs
; t
; t
= TREE_CHAIN (t
), op
++)
1365 tree name
= TREE_PURPOSE (TREE_PURPOSE (t
));
1366 if (name
&& strcmp (TREE_STRING_POINTER (name
), p
) == 0)
1369 for (t
= inputs
; t
; t
= TREE_CHAIN (t
), op
++)
1371 tree name
= TREE_PURPOSE (TREE_PURPOSE (t
));
1372 if (name
&& strcmp (TREE_STRING_POINTER (name
), p
) == 0)
1375 for (t
= labels
; t
; t
= TREE_CHAIN (t
), op
++)
1377 tree name
= TREE_PURPOSE (t
);
1378 if (name
&& strcmp (TREE_STRING_POINTER (name
), p
) == 0)
1382 error ("undefined named operand %qs", identifier_to_locale (p
));
1386 /* Replace the name with the number. Unfortunately, not all libraries
1387 get the return value of sprintf correct, so search for the end of the
1388 generated string by hand. */
1389 sprintf (--p
, "%d", op
);
1390 p
= strchr (p
, '\0');
1392 /* Verify the no extra buffer space assumption. */
1393 gcc_assert (p
<= q
);
1395 /* Shift the rest of the buffer down to fill the gap. */
1396 memmove (p
, q
+ 1, strlen (q
+ 1) + 1);
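/* For illustration (hypothetical example): given

     asm ("add %[src], %[dst]" : [dst] "+r" (x) : [src] "r" (y));

   the named references "%[dst]" and "%[src]" in the template are rewritten
   in place to "%0" and "%1", and the rest of the buffer is shifted down
   over the removed brackets and name.  */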
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}

/* Generate RTL to return directly from the current function.
   (That is, we bypass any return value.)  */

void
expand_naked_return (void)
{
  rtx end_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  end_label = naked_return_label;
  if (!end_label)
    end_label = naked_return_label = gen_label_rtx ();

  emit_jump (end_label);
}
1436 expand_value_return (rtx val
)
1438 /* Copy the value to the return location unless it's already there. */
1440 tree decl
= DECL_RESULT (current_function_decl
);
1441 rtx return_reg
= DECL_RTL (decl
);
1442 if (return_reg
!= val
)
1444 tree funtype
= TREE_TYPE (current_function_decl
);
1445 tree type
= TREE_TYPE (decl
);
1446 int unsignedp
= TYPE_UNSIGNED (type
);
1447 enum machine_mode old_mode
= DECL_MODE (decl
);
1448 enum machine_mode mode
;
1449 if (DECL_BY_REFERENCE (decl
))
1450 mode
= promote_function_mode (type
, old_mode
, &unsignedp
, funtype
, 2);
1452 mode
= promote_function_mode (type
, old_mode
, &unsignedp
, funtype
, 1);
1454 if (mode
!= old_mode
)
1455 val
= convert_modes (mode
, old_mode
, val
, unsignedp
);
1457 if (GET_CODE (return_reg
) == PARALLEL
)
1458 emit_group_load (return_reg
, val
, type
, int_size_in_bytes (type
));
1460 emit_move_insn (return_reg
, val
);
1463 expand_null_return_1 ();
1466 /* Output a return with no value. */
1469 expand_null_return_1 (void)
1471 clear_pending_stack_adjust ();
1472 do_pending_stack_adjust ();
1473 emit_jump (return_label
);
/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

void
expand_return (tree retval)
{
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_normal (retval);
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
         returns a value.  */
      expand_null_return ();
      return;
    }
  else if ((TREE_CODE (retval) == MODIFY_EXPR
            || TREE_CODE (retval) == INIT_EXPR)
           && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = TREE_OPERAND (retval, 1);
  else
    retval_rhs = retval;

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* If we are returning the RESULT_DECL, then the value has already
     been stored into it, so we don't have to do anything special.  */
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
    expand_value_return (result_rtl);

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  */
  else if (retval_rhs != 0
           && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
           && REG_P (result_rtl))
    {
      val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
      if (val)
        {
          /* Use the mode of the result value on the return register.  */
          PUT_MODE (result_rtl, GET_MODE (val));
          expand_value_return (val);
        }
      else
        expand_null_return ();
    }
  else if (retval_rhs != 0
           && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
           && (REG_P (result_rtl)
               || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Calculate the return value into a temporary (usually a pseudo
         reg).  */
      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);

      val = assign_temp (nt, 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
      val = force_not_mem (val);
      /* Return the calculated value.  */
      expand_value_return (val);
    }
  else
    {
      /* No hard reg used; calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
      expand_value_return (result_rtl);
    }
}
/* Emit code to save the current value of stack.  */

rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  do_pending_stack_adjust ();
  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}

/* Emit code to restore the current value of stack.  */

void
expand_stack_restore (tree var)
{
  rtx prev, sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);
  fixup_args_size_notes (prev, get_last_insn (), 0);
}
/* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE.  PROB
   is the probability of jumping to LABEL.  */

static void
do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
                  int unsignedp, int prob)
{
  gcc_assert (prob <= REG_BR_PROB_BASE);
  do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
                           NULL_RTX, NULL_RTX, label, prob);
}
/* Do the insertion of a case label into case_list.  The labels are
   fed to us in descending order from the sorted vector of case labels used
   in the tree part of the middle end.  So the list we construct is
   sorted in ascending order.

   LABEL is the case label to be inserted.  LOW and HIGH are the bounds
   against which the index is compared to jump to LABEL and PROB is the
   estimated probability LABEL is reached from the switch statement.  */

static struct case_node *
add_case_node (struct case_node *head, tree low, tree high,
               tree label, int prob, alloc_pool case_node_pool)
{
  struct case_node *r;

  gcc_checking_assert (low);
  gcc_checking_assert (high && (TREE_TYPE (low) == TREE_TYPE (high)));

  /* Add this label to the chain.  */
  r = (struct case_node *) pool_alloc (case_node_pool);
  r->low = low;
  r->high = high;
  r->code_label = label;
  r->parent = r->left = NULL;
  r->prob = prob;
  r->subtree_prob = prob;
  r->right = head;
  return r;
}
/* Dump ROOT, a list or tree of case nodes, to file.  */

static void
dump_case_nodes (FILE *f, struct case_node *root,
                 int indent_step, int indent_level)
{
  HOST_WIDE_INT low, high;

  if (root == 0)
    return;
  indent_level++;

  dump_case_nodes (f, root->left, indent_step, indent_level);

  low = tree_low_cst (root->low, 0);
  high = tree_low_cst (root->high, 0);

  if (low == high)
    fprintf (f, "%*s" HOST_WIDE_INT_PRINT_DEC,
             indent_step * indent_level, "", low);
  else
    fprintf (f, "%*s" HOST_WIDE_INT_PRINT_DEC " ... " HOST_WIDE_INT_PRINT_DEC,
             indent_step * indent_level, "", low, high);

  dump_case_nodes (f, root->right, indent_step, indent_level);
}
#ifndef HAVE_casesi
#define HAVE_casesi 0
#endif

#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#endif

/* Return the smallest number of different values for which it is best to use a
   jump-table instead of a tree of conditional branches.  */

static unsigned int
case_values_threshold (void)
{
  unsigned int threshold = PARAM_VALUE (PARAM_CASE_VALUES_THRESHOLD);

  if (threshold == 0)
    threshold = targetm.case_values_threshold ();

  return threshold;
}
/* Return true if a switch should be expanded as a decision tree.
   RANGE is the difference between highest and lowest case.
   UNIQ is number of unique case node targets, not counting the default case.
   COUNT is the number of comparisons needed, not counting the default case.  */

static bool
expand_switch_as_decision_tree_p (tree range,
                                  unsigned int uniq ATTRIBUTE_UNUSED,
                                  unsigned int count)
{
  int max_ratio;

  /* If neither casesi nor tablejump is available, or flag_jump_tables
     over-ruled us, we really have no choice.  */
  if (!HAVE_casesi && !HAVE_tablejump)
    return true;
  if (!flag_jump_tables)
    return true;
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
  if (flag_pic)
    return true;
#endif

  /* If the switch is relatively small such that the cost of one
     indirect jump on the target is higher than the cost of a
     decision tree, go with the decision tree.

     If range of values is much bigger than number of values,
     or if it is too large to represent in a HOST_WIDE_INT,
     make a sequence of conditional branches instead of a dispatch.

     The definition of "much bigger" depends on whether we are
     optimizing for size or for speed.  If the former, the maximum
     ratio range/count = 3, because this was found to be the optimal
     ratio for size on i686-pc-linux-gnu, see PR11823.  The ratio
     10 is much older, and was probably selected after an extensive
     benchmarking investigation on numerous platforms.  Or maybe it
     just made sense to someone at some point in the history of GCC,
     who knows...  */
  max_ratio = optimize_insn_for_size_p () ? 3 : 10;
  if (count < case_values_threshold ()
      || ! host_integerp (range, /*pos=*/1)
      || compare_tree_int (range, max_ratio * count) > 0)
    return true;

  return false;
}
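/* A worked (hypothetical) example of the heuristic above: a switch needing
   count = 4 comparisons spread over a range of 100 values gives
   range/count = 25, which exceeds both ratios (3 and 10), so a decision
   tree is used; the same four cases packed into a range of 8 would favor
   a dispatch table, provided count reaches case_values_threshold ().  */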
/* Generate a decision tree, switching on INDEX_EXPR and jumping to
   one of the labels in CASE_LIST or to the DEFAULT_LABEL.
   DEFAULT_PROB is the estimated probability that it jumps to
   the default label.

   We generate a binary decision tree to select the appropriate target
   code.  This is done as follows:

     If the index is a short or char for which we do not have
     an insn to handle comparisons directly, convert it to
     a full integer now, rather than letting each comparison
     generate the conversion.

     Load the index into a register.

     The list of cases is rearranged into a binary tree,
     nearly optimal assuming equal probability for each case.

     The tree is transformed into RTL, eliminating redundant
     test conditions at the same time.

     If program flow could reach the end of the decision tree
     an unconditional jump to the default code is emitted.

   The above process is unaware of the CFG.  The caller has to fix up
   the CFG itself.  This is done in cfgexpand.c.  */
1749 emit_case_decision_tree (tree index_expr
, tree index_type
,
1750 struct case_node
*case_list
, rtx default_label
,
1753 rtx index
= expand_normal (index_expr
);
1755 if (GET_MODE_CLASS (GET_MODE (index
)) == MODE_INT
1756 && ! have_insn_for (COMPARE
, GET_MODE (index
)))
1758 int unsignedp
= TYPE_UNSIGNED (index_type
);
1759 enum machine_mode wider_mode
;
1760 for (wider_mode
= GET_MODE (index
); wider_mode
!= VOIDmode
;
1761 wider_mode
= GET_MODE_WIDER_MODE (wider_mode
))
1762 if (have_insn_for (COMPARE
, wider_mode
))
1764 index
= convert_to_mode (wider_mode
, index
, unsignedp
);
1769 do_pending_stack_adjust ();
1773 index
= copy_to_reg (index
);
1774 if (TREE_CODE (index_expr
) == SSA_NAME
)
1775 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (index_expr
), index
);
1778 balance_case_nodes (&case_list
, NULL
);
1780 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1782 int indent_step
= ceil_log2 (TYPE_PRECISION (index_type
)) + 2;
1783 fprintf (dump_file
, ";; Expanding GIMPLE switch as decision tree:\n");
1784 dump_case_nodes (dump_file
, case_list
, indent_step
, 0);
1787 emit_case_nodes (index
, case_list
, default_label
, default_prob
, index_type
);
1789 emit_jump (default_label
);
/* Return the sum of probabilities of outgoing edges of basic block BB.  */

static int
get_outgoing_edge_probs (basic_block bb)
{
  edge e;
  edge_iterator ei;
  int prob_sum = 0;

  FOR_EACH_EDGE (e, ei, bb->succs)
    prob_sum += e->probability;
  return prob_sum;
}

/* Computes the conditional probability of jumping to a target if the branch
   instruction is executed.
   TARGET_PROB is the estimated probability of jumping to a target relative
   to some basic block BB.
   BASE_PROB is the probability of reaching the branch instruction relative
   to the same basic block BB.  */

static int
conditional_probability (int target_prob, int base_prob)
{
  gcc_assert (target_prob >= 0);
  gcc_assert (target_prob <= base_prob);
  return GCOV_COMPUTE_SCALE (target_prob, base_prob);
}
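/* A worked (hypothetical) example: if the default edge has an estimated
   probability of 2500 and the outgoing probabilities of the switch block
   sum to a BASE_PROB of 10000, conditional_probability (2500, 10000)
   scales that to a quarter of REG_BR_PROB_BASE, i.e. the chance of taking
   the default branch given that the branch insn is reached at all.  */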
/* Generate a dispatch table, switching on INDEX_EXPR and jumping to
   one of the labels in CASE_LIST or to the DEFAULT_LABEL.
   MINVAL, MAXVAL, and RANGE are the extrema and range of the case
   labels in CASE_LIST.  STMT_BB is the basic block containing the statement.

   First, a jump insn is emitted.  We try "casesi" first; if that
   fails, we try "tablejump".  A target *must* have one of them (or both).

   Then, a table with the target labels is emitted.

   The process is unaware of the CFG.  The caller has to fix up
   the CFG itself.  This is done in cfgexpand.c.  */
1840 emit_case_dispatch_table (tree index_expr
, tree index_type
,
1841 struct case_node
*case_list
, rtx default_label
,
1842 tree minval
, tree maxval
, tree range
,
1843 basic_block stmt_bb
)
1846 struct case_node
*n
;
1848 rtx fallback_label
= label_rtx (case_list
->code_label
);
1849 rtx table_label
= gen_label_rtx ();
1850 bool has_gaps
= false;
1851 edge default_edge
= stmt_bb
? EDGE_SUCC(stmt_bb
, 0) : NULL
;
1852 int default_prob
= default_edge
? default_edge
->probability
: 0;
1853 int base
= get_outgoing_edge_probs (stmt_bb
);
1854 bool try_with_tablejump
= false;
1856 int new_default_prob
= conditional_probability (default_prob
,
1859 if (! try_casesi (index_type
, index_expr
, minval
, range
,
1860 table_label
, default_label
, fallback_label
,
1863 /* Index jumptables from zero for suitable values of minval to avoid
1864 a subtraction. For the rationale see:
1865 "http://gcc.gnu.org/ml/gcc-patches/2001-10/msg01234.html". */
1866 if (optimize_insn_for_speed_p ()
1867 && compare_tree_int (minval
, 0) > 0
1868 && compare_tree_int (minval
, 3) < 0)
1870 minval
= build_int_cst (index_type
, 0);
1874 try_with_tablejump
= true;
1877 /* Get table of labels to jump to, in order of case index. */
1879 ncases
= tree_low_cst (range
, 0) + 1;
1880 labelvec
= XALLOCAVEC (rtx
, ncases
);
1881 memset (labelvec
, 0, ncases
* sizeof (rtx
));
1883 for (n
= case_list
; n
; n
= n
->right
)
1885 /* Compute the low and high bounds relative to the minimum
1886 value since that should fit in a HOST_WIDE_INT while the
1887 actual values may not. */
1889 = tree_low_cst (fold_build2 (MINUS_EXPR
, index_type
,
1890 n
->low
, minval
), 1);
1891 HOST_WIDE_INT i_high
1892 = tree_low_cst (fold_build2 (MINUS_EXPR
, index_type
,
1893 n
->high
, minval
), 1);
1896 for (i
= i_low
; i
<= i_high
; i
++)
1898 = gen_rtx_LABEL_REF (Pmode
, label_rtx (n
->code_label
));
1901 /* Fill in the gaps with the default. We may have gaps at
1902 the beginning if we tried to avoid the minval subtraction,
1903 so substitute some label even if the default label was
1904 deemed unreachable. */
1906 default_label
= fallback_label
;
1907 for (i
= 0; i
< ncases
; i
++)
1908 if (labelvec
[i
] == 0)
1911 labelvec
[i
] = gen_rtx_LABEL_REF (Pmode
, default_label
);
1916 /* There is at least one entry in the jump table that jumps
1917 to default label. The default label can either be reached
1918 through the indirect jump or the direct conditional jump
1919 before that. Split the probability of reaching the
1920 default label among these two jumps. */
1921 new_default_prob
= conditional_probability (default_prob
/2,
1924 base
-= default_prob
;
1928 base
-= default_prob
;
1933 default_edge
->probability
= default_prob
;
1935 /* We have altered the probability of the default edge. So the probabilities
1936 of all other edges need to be adjusted so that it sums up to
1937 REG_BR_PROB_BASE. */
1942 FOR_EACH_EDGE (e
, ei
, stmt_bb
->succs
)
1943 e
->probability
= GCOV_COMPUTE_SCALE (e
->probability
, base
);
1946 if (try_with_tablejump
)
1948 bool ok
= try_tablejump (index_type
, index_expr
, minval
, range
,
1949 table_label
, default_label
, new_default_prob
);
1952 /* Output the table. */
1953 emit_label (table_label
);
1955 if (CASE_VECTOR_PC_RELATIVE
|| flag_pic
)
1956 emit_jump_table_data (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE
,
1957 gen_rtx_LABEL_REF (Pmode
,
1959 gen_rtvec_v (ncases
, labelvec
),
1960 const0_rtx
, const0_rtx
));
1962 emit_jump_table_data (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE
,
1963 gen_rtvec_v (ncases
, labelvec
)));
1965 /* Record no drop-through after the table. */
/* Reset the aux field of all outgoing edges of basic block BB.  */

static void
reset_out_edges_aux (basic_block bb)
{
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, bb->succs)
    e->aux = (void *)0;
}
/* Compute the number of case labels that correspond to each outgoing edge of
   STMT.  Record this information in the aux field of the edge.  */

static void
compute_cases_per_edge (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  reset_out_edges_aux (bb);
  int ncases = gimple_switch_num_labels (stmt);
  for (int i = ncases - 1; i >= 1; --i)
    {
      tree elt = gimple_switch_label (stmt, i);
      tree lab = CASE_LABEL (elt);
      basic_block case_bb = label_to_block_fn (cfun, lab);
      edge case_edge = find_edge (bb, case_bb);
      case_edge->aux = (void *)((intptr_t)(case_edge->aux) + 1);
    }
}
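/* For illustration (hypothetical example): if case 1 and case 2 both jump
   to the same label, the single outgoing edge to that block ends up with
   aux == 2, and expand_case below divides the edge probability by that
   count so that each case_node carries its own share.  */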
/* Terminate a case (Pascal/Ada) or switch (C) statement
   in which ORIG_INDEX is the expression to be tested.
   If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
   type as given in the source before any compiler conversions.
   Generate the code to test it and jump to the right place.  */
2006 expand_case (gimple stmt
)
2008 tree minval
= NULL_TREE
, maxval
= NULL_TREE
, range
= NULL_TREE
;
2009 rtx default_label
= NULL_RTX
;
2010 unsigned int count
, uniq
;
2012 int ncases
= gimple_switch_num_labels (stmt
);
2013 tree index_expr
= gimple_switch_index (stmt
);
2014 tree index_type
= TREE_TYPE (index_expr
);
2016 basic_block bb
= gimple_bb (stmt
);
2018 /* A list of case labels; it is first built as a list and it may then
2019 be rearranged into a nearly balanced binary tree. */
2020 struct case_node
*case_list
= 0;
2022 /* A pool for case nodes. */
2023 alloc_pool case_node_pool
;
2025 /* An ERROR_MARK occurs for various reasons including invalid data type.
2026 ??? Can this still happen, with GIMPLE and all? */
2027 if (index_type
== error_mark_node
)
2030 /* cleanup_tree_cfg removes all SWITCH_EXPR with their index
2031 expressions being INTEGER_CST. */
2032 gcc_assert (TREE_CODE (index_expr
) != INTEGER_CST
);
2034 case_node_pool
= create_alloc_pool ("struct case_node pool",
2035 sizeof (struct case_node
),
2038 do_pending_stack_adjust ();
2040 /* Find the default case target label. */
2041 default_label
= label_rtx (CASE_LABEL (gimple_switch_default_label (stmt
)));
2042 edge default_edge
= EDGE_SUCC(bb
, 0);
2043 int default_prob
= default_edge
->probability
;
2045 /* Get upper and lower bounds of case values. */
2046 elt
= gimple_switch_label (stmt
, 1);
2047 minval
= fold_convert (index_type
, CASE_LOW (elt
));
2048 elt
= gimple_switch_label (stmt
, ncases
- 1);
2049 if (CASE_HIGH (elt
))
2050 maxval
= fold_convert (index_type
, CASE_HIGH (elt
));
2052 maxval
= fold_convert (index_type
, CASE_LOW (elt
));
2054 /* Compute span of values. */
2055 range
= fold_build2 (MINUS_EXPR
, index_type
, maxval
, minval
);
2057 /* Listify the labels queue and gather some numbers to decide
2058 how to expand this switch(). */
2061 struct pointer_set_t
*seen_labels
= pointer_set_create ();
2062 compute_cases_per_edge (stmt
);
2064 for (i
= ncases
- 1; i
>= 1; --i
)
2066 elt
= gimple_switch_label (stmt
, i
);
2067 tree low
= CASE_LOW (elt
);
2069 tree high
= CASE_HIGH (elt
);
2070 gcc_assert (! high
|| tree_int_cst_lt (low
, high
));
2071 tree lab
= CASE_LABEL (elt
);
2073 /* Count the elements.
2074 A range counts double, since it requires two compares. */
2079 /* If we have not seen this label yet, then increase the
2080 number of unique case node targets seen. */
2081 if (!pointer_set_insert (seen_labels
, lab
))
2084 /* The bounds on the case range, LOW and HIGH, have to be converted
2085 to case's index type TYPE. Note that the original type of the
2086 case index in the source code is usually "lost" during
2087 gimplification due to type promotion, but the case labels retain the
2088 original type. Make sure to drop overflow flags. */
2089 low
= fold_convert (index_type
, low
);
2090 if (TREE_OVERFLOW (low
))
2091 low
= build_int_cst_wide (index_type
,
2092 TREE_INT_CST_LOW (low
),
2093 TREE_INT_CST_HIGH (low
));
2095 /* The canonical from of a case label in GIMPLE is that a simple case
2096 has an empty CASE_HIGH. For the casesi and tablejump expanders,
2097 the back ends want simple cases to have high == low. */
2100 high
= fold_convert (index_type
, high
);
2101 if (TREE_OVERFLOW (high
))
2102 high
= build_int_cst_wide (index_type
,
2103 TREE_INT_CST_LOW (high
),
2104 TREE_INT_CST_HIGH (high
));
2106 basic_block case_bb
= label_to_block_fn (cfun
, lab
);
2107 edge case_edge
= find_edge (bb
, case_bb
);
2108 case_list
= add_case_node (
2109 case_list
, low
, high
, lab
,
2110 case_edge
->probability
/ (intptr_t)(case_edge
->aux
),
2113 pointer_set_destroy (seen_labels
);
2114 reset_out_edges_aux (bb
);
2116 /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
2117 destination, such as one with a default case only.
2118 It also removes cases that are out of range for the switch
2119 type, so we should never get a zero here. */
2120 gcc_assert (count
> 0);
2122 rtx before_case
= get_last_insn ();
2124 /* Decide how to expand this switch.
2125 The two options at this point are a dispatch table (casesi or
2126 tablejump) or a decision tree. */
2128 if (expand_switch_as_decision_tree_p (range
, uniq
, count
))
2129 emit_case_decision_tree (index_expr
, index_type
,
2130 case_list
, default_label
,
2133 emit_case_dispatch_table (index_expr
, index_type
,
2134 case_list
, default_label
,
2135 minval
, maxval
, range
, bb
);
2137 reorder_insns (NEXT_INSN (before_case
), get_last_insn (), before_case
);
2140 free_alloc_pool (case_node_pool
);
/* Expand the dispatch to a short decrement chain if there are few cases
   to dispatch to.  Likewise if neither casesi nor tablejump is available,
   or if flag_jump_tables is not set.  Otherwise, expand as a casesi or a
   tablejump.  The index mode is always the mode of integer_type_node.
   Trap if no case matches the index.

   DISPATCH_INDEX is the index expression to switch on.  It should be a
   memory or register operand.

   DISPATCH_TABLE is a set of case labels.  The set should be sorted in
   ascending order, be contiguous, starting with value 0, and contain only
   single-valued case labels.  */
2157 expand_sjlj_dispatch_table (rtx dispatch_index
,
2158 vec
<tree
> dispatch_table
)
2160 tree index_type
= integer_type_node
;
2161 enum machine_mode index_mode
= TYPE_MODE (index_type
);
2163 int ncases
= dispatch_table
.length ();
2165 do_pending_stack_adjust ();
2166 rtx before_case
= get_last_insn ();
2168 /* Expand as a decrement-chain if there are 5 or fewer dispatch
2169 labels. This covers more than 98% of the cases in libjava,
2170 and seems to be a reasonable compromise between the "old way"
2171 of expanding as a decision tree or dispatch table vs. the "new
2172 way" with decrement chain or dispatch table. */
2173 if (dispatch_table
.length () <= 5
2174 || (!HAVE_casesi
&& !HAVE_tablejump
)
2175 || !flag_jump_tables
)
2177 /* Expand the dispatch as a decrement chain:
2179 "switch(index) {case 0: do_0; case 1: do_1; ...; case N: do_N;}"
2183 if (index == 0) do_0; else index--;
2184 if (index == 0) do_1; else index--;
2186 if (index == 0) do_N; else index--;
2188 This is more efficient than a dispatch table on most machines.
2189 The last "index--" is redundant but the code is trivially dead
2190 and will be cleaned up by later passes. */
2191 rtx index
= copy_to_mode_reg (index_mode
, dispatch_index
);
2192 rtx zero
= CONST0_RTX (index_mode
);
2193 for (int i
= 0; i
< ncases
; i
++)
2195 tree elt
= dispatch_table
[i
];
2196 rtx lab
= label_rtx (CASE_LABEL (elt
));
2197 do_jump_if_equal (index_mode
, index
, zero
, lab
, 0, -1);
2198 force_expand_binop (index_mode
, sub_optab
,
2199 index
, CONST1_RTX (index_mode
),
2200 index
, 0, OPTAB_DIRECT
);
2205 /* Similar to expand_case, but much simpler. */
2206 struct case_node
*case_list
= 0;
2207 alloc_pool case_node_pool
= create_alloc_pool ("struct sjlj_case pool",
2208 sizeof (struct case_node
),
2210 tree index_expr
= make_tree (index_type
, dispatch_index
);
2211 tree minval
= build_int_cst (index_type
, 0);
2212 tree maxval
= CASE_LOW (dispatch_table
.last ());
2213 tree range
= maxval
;
2214 rtx default_label
= gen_label_rtx ();
2216 for (int i
= ncases
- 1; i
>= 0; --i
)
2218 tree elt
= dispatch_table
[i
];
2219 tree low
= CASE_LOW (elt
);
2220 tree lab
= CASE_LABEL (elt
);
2221 case_list
= add_case_node (case_list
, low
, low
, lab
, 0, case_node_pool
);
2224 emit_case_dispatch_table (index_expr
, index_type
,
2225 case_list
, default_label
,
2226 minval
, maxval
, range
,
2227 BLOCK_FOR_INSN (before_case
));
2228 emit_label (default_label
);
2229 free_alloc_pool (case_node_pool
);
2232 /* Dispatching something not handled? Trap! */
2233 expand_builtin_trap ();
2235 reorder_insns (NEXT_INSN (before_case
), get_last_insn (), before_case
);
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
        {
          if (!tree_int_cst_equal (np->low, np->high))
            ranges++;

          i++;
          np = np->right;
        }

      if (i > 2)
        {
          /* Split this list if it is long enough for that to help.  */
          npp = head;
          left = *npp;

          /* If there are just three nodes, split at the middle one.  */
          if (i == 3)
            npp = &(*npp)->right;
          else
            {
              /* Find the place in the list that bisects the list's total cost,
                 where ranges count as 2.
                 Here I gets half the total cost.  */
              i = (i + ranges + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i--;
                  i--;
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                }
            }
          *head = np = *npp;
          *npp = 0;
          np->parent = parent;
          np->left = left;

          /* Optimize each of the two split parts.  */
          balance_case_nodes (&np->left, np);
          balance_case_nodes (&np->right, np);
          np->subtree_prob = np->prob;
          np->subtree_prob += np->left->subtree_prob;
          np->subtree_prob += np->right->subtree_prob;
        }
      else
        {
          /* Else leave this branch as one level,
             but fill in `parent' fields.  */
          np = *head;
          np->parent = parent;
          np->subtree_prob = np->prob;
          for (; np->right; np = np->right)
            {
              np->right->parent = np;
              (*head)->subtree_prob += np->right->subtree_prob;
            }
        }
    }
}
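
/* As an illustration of the splitting done by balance_case_nodes, a chain
   of five single-valued case nodes

       1 -> 2 -> 3 -> 4 -> 5        (linked through the RIGHT fields)

   is split at the pivot 3, giving roughly

             3
            / \
           1   4
            \   \
             2   5

   so that emit_case_nodes needs about log2(N) comparisons to reach a
   label instead of up to N.  The case values shown are only
   illustrative.  */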
/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */

static int
node_has_low_bound (case_node_ptr node, tree index_type)
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
                               node->low,
                               build_int_cst (TREE_TYPE (node->low), 1));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}
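
/* For instance, if NODE covers the value 5 and one of its parents covers
   the value 4, control only reaches NODE's code after the parent's test
   has already sent every index below 5 elsewhere, so emitting another
   "index < 5" check here would be redundant.  node_has_high_bound below
   is the mirror image for the upper bound.  The values are only
   illustrative.  */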
/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (case_node_ptr node, tree index_type)
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If there is no upper bound, obviously no test is needed.  */

  if (TYPE_MAX_VALUE (index_type) == NULL)
    return 1;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
                               node->high,
                               build_int_cst (TREE_TYPE (node->high), 1));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}
/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (case_node_ptr node, tree index_type)
{
  return (node_has_low_bound (node, index_type)
          && node_has_high_bound (node, index_type));
}
/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates associated
   code for out of bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */

static void
emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
                 int default_prob, tree index_type)
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TYPE_UNSIGNED (index_type);
  int probability;
  int prob = node->prob, subtree_prob = node->subtree_prob;
  enum machine_mode mode = GET_MODE (index);
  enum machine_mode imode = TYPE_MODE (index_type);

  /* Handle indices detected as constant during RTL expansion.  */
  if (mode == VOIDmode)
    mode = imode;

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      probability = conditional_probability (prob, subtree_prob + default_prob);
      /* Node is single valued.  First see if the index expression matches
         this node and then check our children, if any.  */
      do_jump_if_equal (mode, index,
                        convert_modes (mode, imode,
                                       expand_normal (node->low),
                                       unsignedp),
                        label_rtx (node->code_label), unsignedp, probability);
      /* Since this case is taken at this point, reduce its weight from
         subtree_weight.  */
      subtree_prob -= prob;
      if (node->right != 0 && node->left != 0)
        {
          /* This node has children on both sides.
             Dispatch to one side or the other
             by comparing the index value with this node's value.
             If one subtree is bounded, check that one first,
             so we can avoid real branches in the tree.  */

          if (node_is_bounded (node->right, index_type))
            {
              probability = conditional_probability (
                  node->right->prob,
                  subtree_prob + default_prob);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (node->right->code_label),
                                       probability);
              emit_case_nodes (index, node->left, default_label, default_prob,
                               index_type);
            }

          else if (node_is_bounded (node->left, index_type))
            {
              probability = conditional_probability (
                  node->left->prob,
                  subtree_prob + default_prob);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       LT, NULL_RTX, mode, unsignedp,
                                       label_rtx (node->left->code_label),
                                       probability);
              emit_case_nodes (index, node->right, default_label, default_prob, index_type);
            }

          /* If both children are single-valued cases with no
             children, finish up all the work.  This way, we can save
             one ordered comparison.  */
          else if (tree_int_cst_equal (node->right->low, node->right->high)
                   && node->right->left == 0
                   && node->right->right == 0
                   && tree_int_cst_equal (node->left->low, node->left->high)
                   && node->left->left == 0
                   && node->left->right == 0)
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              /* See if the value matches what the right hand side
                 wants.  */
              probability = conditional_probability (
                  node->right->prob,
                  subtree_prob + default_prob);
              do_jump_if_equal (mode, index,
                                convert_modes (mode, imode,
                                               expand_normal (node->right->low),
                                               unsignedp),
                                label_rtx (node->right->code_label),
                                unsignedp, probability);

              /* See if the value matches what the left hand side
                 wants.  */
              probability = conditional_probability (
                  node->left->prob,
                  subtree_prob + default_prob);
              do_jump_if_equal (mode, index,
                                convert_modes (mode, imode,
                                               expand_normal (node->left->low),
                                               unsignedp),
                                label_rtx (node->left->code_label),
                                unsignedp, probability);
            }

          else
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              tree test_label
                = build_decl (curr_insn_location (),
                              LABEL_DECL, NULL_TREE, NULL_TREE);

              /* The default label could be reached either through the right
                 subtree or the left subtree.  Divide the probability
                 equally.  */
              probability = conditional_probability (
                  node->right->subtree_prob + default_prob/2,
                  subtree_prob + default_prob);
              /* See if the value is on the right.  */
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (test_label),
                                       probability);
              default_prob /= 2;

              /* Value must be on the left.
                 Handle the left-hand subtree.  */
              emit_case_nodes (index, node->left, default_label, default_prob, index_type);
              /* If left-hand subtree does nothing,
                 go to default.  */
              if (default_label)
                emit_jump (default_label);

              /* Code branches here for the right-hand subtree.  */
              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, default_prob, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Here we have a right child but no left so we issue a conditional
             branch to default and process the right child.

             Omit the conditional branch to default if the right child
             does not have any children and is single valued; it would
             cost too much space to save so little time.  */

          if (node->right->right || node->right->left
              || !tree_int_cst_equal (node->right->low, node->right->high))
            {
              if (!node_has_low_bound (node, index_type))
                {
                  probability = conditional_probability (
                      default_prob/2,
                      subtree_prob + default_prob);
                  emit_cmp_and_jump_insns (index,
                                           convert_modes
                                           (mode, imode,
                                            expand_normal (node->high),
                                            unsignedp),
                                           LT, NULL_RTX, mode, unsignedp,
                                           default_label,
                                           probability);
                  default_prob /= 2;
                }

              emit_case_nodes (index, node->right, default_label, default_prob, index_type);
            }
          else
            {
              probability = conditional_probability (
                  node->right->subtree_prob,
                  subtree_prob + default_prob);
              /* We cannot process node->right normally
                 since we haven't ruled out the numbers less than
                 this node's value.  So handle node->right explicitly.  */
              do_jump_if_equal (mode, index,
                                convert_modes
                                (mode, imode,
                                 expand_normal (node->right->low),
                                 unsignedp),
                                label_rtx (node->right->code_label), unsignedp, probability);
            }
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Just one subtree, on the left.  */
          if (node->left->left || node->left->right
              || !tree_int_cst_equal (node->left->low, node->left->high))
            {
              if (!node_has_high_bound (node, index_type))
                {
                  probability = conditional_probability (
                      default_prob/2,
                      subtree_prob + default_prob);
                  emit_cmp_and_jump_insns (index,
                                           convert_modes
                                           (mode, imode,
                                            expand_normal (node->high),
                                            unsignedp),
                                           GT, NULL_RTX, mode, unsignedp,
                                           default_label,
                                           probability);
                  default_prob /= 2;
                }

              emit_case_nodes (index, node->left, default_label,
                               default_prob, index_type);
            }
          else
            {
              probability = conditional_probability (
                  node->left->subtree_prob,
                  subtree_prob + default_prob);
              /* We cannot process node->left normally
                 since we haven't ruled out the numbers greater than
                 this node's value.  So handle node->left explicitly.  */
              do_jump_if_equal (mode, index,
                                convert_modes
                                (mode, imode,
                                 expand_normal (node->left->low),
                                 unsignedp),
                                label_rtx (node->left->code_label), unsignedp, probability);
            }
        }
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
         value, except that we do not start by testing whether this node
         is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
        {
          /* Node has subtrees on both sides.
             If the right-hand subtree is bounded,
             test for it first, since we can go straight there.
             Otherwise, we need to make a branch in the control structure,
             then handle the two subtrees.  */
          tree test_label = 0;

          if (node_is_bounded (node->right, index_type))
            {
              /* Right hand node is fully bounded so we can eliminate any
                 testing and branch directly to the target code.  */
              probability = conditional_probability (
                  node->right->subtree_prob,
                  subtree_prob + default_prob);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (node->right->code_label),
                                       probability);
            }
          else
            {
              /* Right hand node requires testing.
                 Branch to a label where we will handle it later.  */

              test_label = build_decl (curr_insn_location (),
                                       LABEL_DECL, NULL_TREE, NULL_TREE);
              probability = conditional_probability (
                  node->right->subtree_prob + default_prob/2,
                  subtree_prob + default_prob);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (test_label),
                                       probability);
              default_prob /= 2;
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          probability = conditional_probability (
              prob,
              subtree_prob + default_prob);
          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_normal (node->low),
                                    unsignedp),
                                   GE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label),
                                   probability);

          /* Handle the left-hand subtree.  */
          emit_case_nodes (index, node->left, default_label, default_prob, index_type);

          /* If right node had to be handled later, do that now.  */
          if (test_label)
            {
              /* If the left-hand subtree fell through,
                 don't let it fall into the right-hand subtree.  */
              if (default_label)
                emit_jump (default_label);

              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, default_prob, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Deal with values to the left of this node,
             if they are possible.  */
          if (!node_has_low_bound (node, index_type))
            {
              probability = conditional_probability (
                  default_prob/2,
                  subtree_prob + default_prob);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->low),
                                        unsignedp),
                                       LT, NULL_RTX, mode, unsignedp,
                                       default_label,
                                       probability);
              default_prob /= 2;
            }

          /* Value belongs to this node or to the right-hand subtree.  */

          probability = conditional_probability (
              prob,
              subtree_prob + default_prob);
          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_normal (node->high),
                                    unsignedp),
                                   LE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label),
                                   probability);

          emit_case_nodes (index, node->right, default_label, default_prob, index_type);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Deal with values to the right of this node,
             if they are possible.  */
          if (!node_has_high_bound (node, index_type))
            {
              probability = conditional_probability (
                  default_prob/2,
                  subtree_prob + default_prob);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       default_label,
                                       probability);
              default_prob /= 2;
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          probability = conditional_probability (
              prob,
              subtree_prob + default_prob);
          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_normal (node->low),
                                    unsignedp),
                                   GE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label),
                                   probability);

          emit_case_nodes (index, node->left, default_label, default_prob, index_type);
        }

      else
        {
          /* Node has no children so we check low and high bounds to remove
             redundant tests.  Only one of the bounds can exist,
             since otherwise this node is bounded--a case tested already.  */
          int high_bound = node_has_high_bound (node, index_type);
          int low_bound = node_has_low_bound (node, index_type);

          if (!high_bound && low_bound)
            {
              probability = conditional_probability (
                  default_prob,
                  subtree_prob + default_prob);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       default_label,
                                       probability);
            }

          else if (!low_bound && high_bound)
            {
              probability = conditional_probability (
                  default_prob,
                  subtree_prob + default_prob);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->low),
                                        unsignedp),
                                       LT, NULL_RTX, mode, unsignedp,
                                       default_label,
                                       probability);
            }
          else if (!low_bound && !high_bound)
            {
              /* Widen LOW and HIGH to the same width as INDEX.  */
              tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
              tree low = build1 (CONVERT_EXPR, type, node->low);
              tree high = build1 (CONVERT_EXPR, type, node->high);
              rtx low_rtx, new_index, new_bound;

              /* Instead of doing two branches, emit one unsigned branch for
                 (index-low) > (high-low).  */
              low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
              new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
                                               NULL_RTX, unsignedp,
                                               OPTAB_WIDEN);
              new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
                                                    high, low),
                                       NULL_RTX, mode, EXPAND_NORMAL);

              probability = conditional_probability (
                  default_prob,
                  subtree_prob + default_prob);
              emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
                                       mode, 1, default_label, probability);
            }

          emit_jump (label_rtx (node->code_label));
        }
    }
}
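
/* The single-comparison range check used in the !low_bound && !high_bound
   case above deserves a worked example: for a case range 10 ... 20 with
   neither bound known, instead of

       if (index < 10) goto default;
       if (index > 20) goto default;

   the expander emits the equivalent of

       if ((unsigned) (index - 10) > 20 - 10) goto default;

   because the subtraction wraps any index below 10 around to a large
   unsigned value, so one unsigned comparison rejects both out-of-range
   sides.  The constants are only illustrative.  */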