/* Expands front end tree to back end RTL for GCC
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011, 2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.

   The functions whose names start with `expand_' are called by the
   expander to generate RTL instructions for various kinds of constructs.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "alloc-pool.h"
#include "pretty-print.h"
#include "pointer-set.h"
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   We start with a vector of case nodes sorted in ascending order, and
   the default label as the last element in the vector.  Before expanding
   to RTL, we transform this vector into a list linked via the RIGHT
   fields in the case_node struct.  Nodes with higher case values are
   later in the list.

   Switch statements can be output in three forms.  A branch table is
   used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.

   For very small, suitable switch statements, we can generate a series
   of simple bit test and branches instead.  */
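/* Illustrative example (not from the original source; the code and the
   numbers here are made up): the kind of source-level switches the three
   strategies above are aimed at.  The actual choice is made later by
   expand_switch_as_decision_tree_p and case_values_threshold.

     switch (x)                      switch (x)
       {                               {
       case 0: case 1: ... case 9:     case 1: case 1000: case 5000:
         f (); break;                    f (); break;
       }                               }

   Ten dense values (left) are a good candidate for a branch table; three
   widely spread values (right) are better served by a small binary tree
   of compare-and-jump insns; a switch with only two or three labels may
   be expanded as bit tests instead.  */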
struct case_node
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;                 /* Lowest index value for this label */
  tree high;                /* Highest index value for this label */
  tree code_label;          /* Label to jump to when node matches */
  int prob;                 /* Probability of taking this case.  */
  /* Probability of reaching subtree rooted at this node */
  int subtree_prob;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
extern basic_block label_to_block_fn (struct function *, tree);

static int n_occurrences (int, const char *);
static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
static void expand_nl_goto_receiver (void);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree, tree);
static void expand_null_return_1 (void);
static void expand_value_return (rtx);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_case_nodes (rtx, case_node_ptr, rtx, int, tree);
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (tree label)
{
  gcc_assert (TREE_CODE (label) == LABEL_DECL);

  if (!DECL_RTL_SET_P (label))
    {
      rtx r = gen_label_rtx ();
      SET_DECL_RTL (label, r);
      if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
        LABEL_PRESERVE_P (r) = 1;
    }

  return DECL_RTL (label);
}
/* As above, but also put it on the forced-reference list of the
   function that contains it.  */

rtx
force_label_rtx (tree label)
{
  rtx ref = label_rtx (label);
  tree function = decl_function_context (label);

  gcc_assert (function);

  forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref, forced_labels);
  return ref;
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (tree exp)
{
  rtx x = expand_normal (exp);

  x = convert_memory_address (Pmode, x);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}
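/* Illustrative example (not from the original source; names are made up):
   the kind of GNU C computed goto that reaches expand_computed_goto, with
   EXP being the pointer expression after the `goto *':

     void dispatch (int i)
     {
       static void *tbl[] = { &&op0, &&op1 };
       goto *tbl[i];
     op0: return;
     op1: return;
     }

   The address is converted to Pmode and emitted as an indirect jump.  */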
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */
void
expand_label (tree label)
{
  rtx label_r = label_rtx (label);

  do_pending_stack_adjust ();
  emit_label (label_r);
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (DECL_NONLOCAL (label))
    {
      expand_nl_goto_receiver ();
      nonlocal_goto_handler_labels
        = gen_rtx_EXPR_LIST (VOIDmode, label_r,
                             nonlocal_goto_handler_labels);
    }

  if (FORCED_LABEL (label))
    forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);

  if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
    maybe_set_first_label_num (label_r);
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
#ifdef ENABLE_CHECKING
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  gcc_assert (!context || context == current_function_decl);
#endif

  emit_jump (label_rtx (label));
}
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

static void
expand_asm_loc (tree string, int vol, location_t locus)
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT_loc (VOIDmode,
                                ggc_strdup (TREE_STRING_POINTER (string)),
                                locus);

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);
}
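/* Illustrative example (not from the original source): expand_asm_loc
   handles basic asm statements, i.e. those without operands, such as

     asm volatile ("cli");

   The whole template becomes a single ASM_INPUT rtx; VOL is nonzero for
   the `volatile' form so the insn is not deleted or moved.  */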
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */
279 parse_output_constraint (const char **constraint_p
, int operand_num
,
280 int ninputs
, int noutputs
, bool *allows_mem
,
281 bool *allows_reg
, bool *is_inout
)
283 const char *constraint
= *constraint_p
;
286 /* Assume the constraint doesn't allow the use of either a register
291 /* Allow the `=' or `+' to not be at the beginning of the string,
292 since it wasn't explicitly documented that way, and there is a
293 large body of code that puts it last. Swap the character to
294 the front, so as not to uglify any place else. */
295 p
= strchr (constraint
, '=');
297 p
= strchr (constraint
, '+');
299 /* If the string doesn't contain an `=', issue an error
303 error ("output operand constraint lacks %<=%>");
307 /* If the constraint begins with `+', then the operand is both read
308 from and written to. */
309 *is_inout
= (*p
== '+');
311 /* Canonicalize the output constraint so that it begins with `='. */
312 if (p
!= constraint
|| *is_inout
)
315 size_t c_len
= strlen (constraint
);
318 warning (0, "output constraint %qc for operand %d "
319 "is not at the beginning",
322 /* Make a copy of the constraint. */
323 buf
= XALLOCAVEC (char, c_len
+ 1);
324 strcpy (buf
, constraint
);
325 /* Swap the first character and the `=' or `+'. */
326 buf
[p
- constraint
] = buf
[0];
327 /* Make sure the first character is an `='. (Until we do this,
328 it might be a `+'.) */
330 /* Replace the constraint with the canonicalized string. */
331 *constraint_p
= ggc_alloc_string (buf
, c_len
);
332 constraint
= *constraint_p
;
335 /* Loop through the constraint string. */
336 for (p
= constraint
+ 1; *p
; p
+= CONSTRAINT_LEN (*p
, p
))
341 error ("operand constraint contains incorrectly positioned "
346 if (operand_num
+ 1 == ninputs
+ noutputs
)
348 error ("%<%%%> constraint used with last operand");
353 case 'V': case TARGET_MEM_CONSTRAINT
: case 'o':
357 case '?': case '!': case '*': case '&': case '#':
358 case 'E': case 'F': case 'G': case 'H':
359 case 's': case 'i': case 'n':
360 case 'I': case 'J': case 'K': case 'L': case 'M':
361 case 'N': case 'O': case 'P': case ',':
364 case '0': case '1': case '2': case '3': case '4':
365 case '5': case '6': case '7': case '8': case '9':
367 error ("matching constraint not valid in output operand");
371 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
372 excepting those that expand_call created. So match memory
389 if (REG_CLASS_FROM_CONSTRAINT (*p
, p
) != NO_REGS
)
391 #ifdef EXTRA_CONSTRAINT_STR
392 else if (EXTRA_ADDRESS_CONSTRAINT (*p
, p
))
394 else if (EXTRA_MEMORY_CONSTRAINT (*p
, p
))
398 /* Otherwise we can't assume anything about the nature of
399 the constraint except that it isn't purely registers.
400 Treat it like "g" and hope for the best. */
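/* Illustrative examples (not from the original source; the variable names
   are made up) of the output constraints parsed above, as written in
   user code:

     asm ("..." : "=r" (x));       register only, write-only
     asm ("..." : "=m" (x));       memory only
     asm ("..." : "+r" (x));       read-write; canonicalized to "=r"
                                   and reported through *IS_INOUT
     asm ("..." : "=&r" (x));      early-clobbered register

   A constraint such as "x=r", with the `=' not at the front, is accepted
   with a warning and rewritten so that the `=' comes first.  */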
411 /* Similar, but for input constraints. */
414 parse_input_constraint (const char **constraint_p
, int input_num
,
415 int ninputs
, int noutputs
, int ninout
,
416 const char * const * constraints
,
417 bool *allows_mem
, bool *allows_reg
)
419 const char *constraint
= *constraint_p
;
420 const char *orig_constraint
= constraint
;
421 size_t c_len
= strlen (constraint
);
423 bool saw_match
= false;
425 /* Assume the constraint doesn't allow the use of either
426 a register or memory. */
430 /* Make sure constraint has neither `=', `+', nor '&'. */
432 for (j
= 0; j
< c_len
; j
+= CONSTRAINT_LEN (constraint
[j
], constraint
+j
))
433 switch (constraint
[j
])
435 case '+': case '=': case '&':
436 if (constraint
== orig_constraint
)
438 error ("input operand constraint contains %qc", constraint
[j
]);
444 if (constraint
== orig_constraint
445 && input_num
+ 1 == ninputs
- ninout
)
447 error ("%<%%%> constraint used with last operand");
452 case 'V': case TARGET_MEM_CONSTRAINT
: case 'o':
457 case '?': case '!': case '*': case '#':
458 case 'E': case 'F': case 'G': case 'H':
459 case 's': case 'i': case 'n':
460 case 'I': case 'J': case 'K': case 'L': case 'M':
461 case 'N': case 'O': case 'P': case ',':
464 /* Whether or not a numeric constraint allows a register is
465 decided by the matching constraint, and so there is no need
466 to do anything special with them. We must handle them in
467 the default case, so that we don't unnecessarily force
468 operands to memory. */
469 case '0': case '1': case '2': case '3': case '4':
470 case '5': case '6': case '7': case '8': case '9':
477 match
= strtoul (constraint
+ j
, &end
, 10);
478 if (match
>= (unsigned long) noutputs
)
480 error ("matching constraint references invalid operand number");
484 /* Try and find the real constraint for this dup. Only do this
485 if the matching constraint is the only alternative. */
487 && (j
== 0 || (j
== 1 && constraint
[0] == '%')))
489 constraint
= constraints
[match
];
490 *constraint_p
= constraint
;
491 c_len
= strlen (constraint
);
493 /* ??? At the end of the loop, we will skip the first part of
494 the matched constraint. This assumes not only that the
495 other constraint is an output constraint, but also that
496 the '=' or '+' come first. */
500 j
= end
- constraint
;
501 /* Anticipate increment at end of loop. */
516 if (! ISALPHA (constraint
[j
]))
518 error ("invalid punctuation %qc in constraint", constraint
[j
]);
521 if (REG_CLASS_FROM_CONSTRAINT (constraint
[j
], constraint
+ j
)
524 #ifdef EXTRA_CONSTRAINT_STR
525 else if (EXTRA_ADDRESS_CONSTRAINT (constraint
[j
], constraint
+ j
))
527 else if (EXTRA_MEMORY_CONSTRAINT (constraint
[j
], constraint
+ j
))
531 /* Otherwise we can't assume anything about the nature of
532 the constraint except that it isn't purely registers.
533 Treat it like "g" and hope for the best. */
541 if (saw_match
&& !*allows_reg
)
542 warning (0, "matching constraint does not allow a register");
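/* Illustrative example (not from the original source; the template and
   operand names are made up) of an input list with a matching constraint,
   as parsed above:

     asm ("add %0,%2" : "=r" (out) : "0" (in), "ri" (addend));

   The "0" makes input operand 1 share the location of output operand 0,
   which is why the code above chases CONSTRAINTS[MATCH] to learn whether
   a register or memory is really allowed for it.  */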
547 /* Return DECL iff there's an overlap between *REGS and DECL, where DECL
548 can be an asm-declared register. Called via walk_tree. */
551 decl_overlaps_hard_reg_set_p (tree
*declp
, int *walk_subtrees ATTRIBUTE_UNUSED
,
555 const HARD_REG_SET
*const regs
= (const HARD_REG_SET
*) data
;
557 if (TREE_CODE (decl
) == VAR_DECL
)
559 if (DECL_HARD_REGISTER (decl
)
560 && REG_P (DECL_RTL (decl
))
561 && REGNO (DECL_RTL (decl
)) < FIRST_PSEUDO_REGISTER
)
563 rtx reg
= DECL_RTL (decl
);
565 if (overlaps_hard_reg_set_p (*regs
, GET_MODE (reg
), REGNO (reg
)))
570 else if (TYPE_P (decl
) || TREE_CODE (decl
) == PARM_DECL
)
575 /* If there is an overlap between *REGS and DECL, return the first overlap
578 tree_overlaps_hard_reg_set (tree decl
, HARD_REG_SET
*regs
)
580 return walk_tree (&decl
, decl_overlaps_hard_reg_set_p
, regs
, NULL
);
583 /* Check for overlap between registers marked in CLOBBERED_REGS and
584 anything inappropriate in T. Emit error and return the register
585 variable definition for error, NULL_TREE for ok. */
588 tree_conflicts_with_clobbers_p (tree t
, HARD_REG_SET
*clobbered_regs
)
590 /* Conflicts between asm-declared register variables and the clobber
591 list are not allowed. */
592 tree overlap
= tree_overlaps_hard_reg_set (t
, clobbered_regs
);
596 error ("asm-specifier for variable %qE conflicts with asm clobber list",
597 DECL_NAME (overlap
));
599 /* Reset registerness to stop multiple errors emitted for a single
601 DECL_REGISTER (overlap
) = 0;
608 /* Generate RTL for an asm statement with arguments.
609 STRING is the instruction template.
610 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
611 Each output or input has an expression in the TREE_VALUE and
612 a tree list in TREE_PURPOSE which in turn contains a constraint
613 name in TREE_VALUE (or NULL_TREE) and a constraint string
615 CLOBBERS is a list of STRING_CST nodes each naming a hard register
616 that is clobbered by this insn.
618 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
619 Some elements of OUTPUTS may be replaced with trees representing temporary
620 values. The caller should copy those temporary values to the originally
623 VOL nonzero means the insn is volatile; don't optimize it. */
626 expand_asm_operands (tree string
, tree outputs
, tree inputs
,
627 tree clobbers
, tree labels
, int vol
, location_t locus
)
629 rtvec argvec
, constraintvec
, labelvec
;
631 int ninputs
= list_length (inputs
);
632 int noutputs
= list_length (outputs
);
633 int nlabels
= list_length (labels
);
636 HARD_REG_SET clobbered_regs
;
637 int clobber_conflict_found
= 0;
641 /* Vector of RTX's of evaluated output operands. */
642 rtx
*output_rtx
= XALLOCAVEC (rtx
, noutputs
);
643 int *inout_opnum
= XALLOCAVEC (int, noutputs
);
644 rtx
*real_output_rtx
= XALLOCAVEC (rtx
, noutputs
);
645 enum machine_mode
*inout_mode
= XALLOCAVEC (enum machine_mode
, noutputs
);
646 const char **constraints
= XALLOCAVEC (const char *, noutputs
+ ninputs
);
647 int old_generating_concat_p
= generating_concat_p
;
649 /* An ASM with no outputs needs to be treated as volatile, for now. */
653 if (! check_operand_nalternatives (outputs
, inputs
))
656 string
= resolve_asm_operand_names (string
, outputs
, inputs
, labels
);
658 /* Collect constraints. */
660 for (t
= outputs
; t
; t
= TREE_CHAIN (t
), i
++)
661 constraints
[i
] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
662 for (t
= inputs
; t
; t
= TREE_CHAIN (t
), i
++)
663 constraints
[i
] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
665 /* Sometimes we wish to automatically clobber registers across an asm.
666 Case in point is when the i386 backend moved from cc0 to a hard reg --
667 maintaining source-level compatibility means automatically clobbering
668 the flags register. */
669 clobbers
= targetm
.md_asm_clobbers (outputs
, inputs
, clobbers
);
671 /* Count the number of meaningful clobbered registers, ignoring what
672 we would ignore later. */
674 CLEAR_HARD_REG_SET (clobbered_regs
);
675 for (tail
= clobbers
; tail
; tail
= TREE_CHAIN (tail
))
680 if (TREE_VALUE (tail
) == error_mark_node
)
682 regname
= TREE_STRING_POINTER (TREE_VALUE (tail
));
684 i
= decode_reg_name_and_count (regname
, &nregs
);
688 error ("unknown register name %qs in %<asm%>", regname
);
690 /* Mark clobbered registers. */
695 for (reg
= i
; reg
< i
+ nregs
; reg
++)
699 /* Clobbering the PIC register is an error. */
700 if (reg
== (int) PIC_OFFSET_TABLE_REGNUM
)
702 error ("PIC register clobbered by %qs in %<asm%>", regname
);
706 SET_HARD_REG_BIT (clobbered_regs
, reg
);
711 /* First pass over inputs and outputs checks validity and sets
712 mark_addressable if needed. */
715 for (i
= 0, tail
= outputs
; tail
; tail
= TREE_CHAIN (tail
), i
++)
717 tree val
= TREE_VALUE (tail
);
718 tree type
= TREE_TYPE (val
);
719 const char *constraint
;
724 /* If there's an erroneous arg, emit no insn. */
725 if (type
== error_mark_node
)
728 /* Try to parse the output constraint. If that fails, there's
729 no point in going further. */
730 constraint
= constraints
[i
];
731 if (!parse_output_constraint (&constraint
, i
, ninputs
, noutputs
,
732 &allows_mem
, &allows_reg
, &is_inout
))
739 && REG_P (DECL_RTL (val
))
740 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
))))
741 mark_addressable (val
);
748 if (ninputs
+ noutputs
> MAX_RECOG_OPERANDS
)
750 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS
);
754 for (i
= 0, tail
= inputs
; tail
; i
++, tail
= TREE_CHAIN (tail
))
756 bool allows_reg
, allows_mem
;
757 const char *constraint
;
759 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
760 would get VOIDmode and that could cause a crash in reload. */
761 if (TREE_TYPE (TREE_VALUE (tail
)) == error_mark_node
)
764 constraint
= constraints
[i
+ noutputs
];
765 if (! parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, ninout
,
766 constraints
, &allows_mem
, &allows_reg
))
769 if (! allows_reg
&& allows_mem
)
770 mark_addressable (TREE_VALUE (tail
));
773 /* Second pass evaluates arguments. */
775 /* Make sure stack is consistent for asm goto. */
777 do_pending_stack_adjust ();
780 for (i
= 0, tail
= outputs
; tail
; tail
= TREE_CHAIN (tail
), i
++)
782 tree val
= TREE_VALUE (tail
);
783 tree type
= TREE_TYPE (val
);
790 ok
= parse_output_constraint (&constraints
[i
], i
, ninputs
,
791 noutputs
, &allows_mem
, &allows_reg
,
795 /* If an output operand is not a decl or indirect ref and our constraint
796 allows a register, make a temporary to act as an intermediate.
797 Make the asm insn write into that, then our caller will copy it to
798 the real output operand. Likewise for promoted variables. */
800 generating_concat_p
= 0;
802 real_output_rtx
[i
] = NULL_RTX
;
803 if ((TREE_CODE (val
) == INDIRECT_REF
806 && (allows_mem
|| REG_P (DECL_RTL (val
)))
807 && ! (REG_P (DECL_RTL (val
))
808 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
)))
812 op
= expand_expr (val
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
814 op
= validize_mem (op
);
816 if (! allows_reg
&& !MEM_P (op
))
817 error ("output number %d not directly addressable", i
);
818 if ((! allows_mem
&& MEM_P (op
))
819 || GET_CODE (op
) == CONCAT
)
821 real_output_rtx
[i
] = op
;
822 op
= gen_reg_rtx (GET_MODE (op
));
824 emit_move_insn (op
, real_output_rtx
[i
]);
829 op
= assign_temp (type
, 0, 1);
830 op
= validize_mem (op
);
831 if (!MEM_P (op
) && TREE_CODE (TREE_VALUE (tail
)) == SSA_NAME
)
832 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail
)), op
);
833 TREE_VALUE (tail
) = make_tree (type
, op
);
837 generating_concat_p
= old_generating_concat_p
;
841 inout_mode
[ninout
] = TYPE_MODE (type
);
842 inout_opnum
[ninout
++] = i
;
845 if (tree_conflicts_with_clobbers_p (val
, &clobbered_regs
))
846 clobber_conflict_found
= 1;
849 /* Make vectors for the expression-rtx, constraint strings,
850 and named operands. */
852 argvec
= rtvec_alloc (ninputs
);
853 constraintvec
= rtvec_alloc (ninputs
);
854 labelvec
= rtvec_alloc (nlabels
);
856 body
= gen_rtx_ASM_OPERANDS ((noutputs
== 0 ? VOIDmode
857 : GET_MODE (output_rtx
[0])),
858 ggc_strdup (TREE_STRING_POINTER (string
)),
859 empty_string
, 0, argvec
, constraintvec
,
862 MEM_VOLATILE_P (body
) = vol
;
864 /* Eval the inputs and put them into ARGVEC.
865 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
867 for (i
= 0, tail
= inputs
; tail
; tail
= TREE_CHAIN (tail
), ++i
)
869 bool allows_reg
, allows_mem
;
870 const char *constraint
;
875 constraint
= constraints
[i
+ noutputs
];
876 ok
= parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, ninout
,
877 constraints
, &allows_mem
, &allows_reg
);
880 generating_concat_p
= 0;
882 val
= TREE_VALUE (tail
);
883 type
= TREE_TYPE (val
);
884 /* EXPAND_INITIALIZER will not generate code for valid initializer
885 constants, but will still generate code for other types of operand.
886 This is the behavior we want for constant constraints. */
887 op
= expand_expr (val
, NULL_RTX
, VOIDmode
,
888 allows_reg
? EXPAND_NORMAL
889 : allows_mem
? EXPAND_MEMORY
890 : EXPAND_INITIALIZER
);
892 /* Never pass a CONCAT to an ASM. */
893 if (GET_CODE (op
) == CONCAT
)
894 op
= force_reg (GET_MODE (op
), op
);
896 op
= validize_mem (op
);
898 if (asm_operand_ok (op
, constraint
, NULL
) <= 0)
900 if (allows_reg
&& TYPE_MODE (type
) != BLKmode
)
901 op
= force_reg (TYPE_MODE (type
), op
);
902 else if (!allows_mem
)
903 warning (0, "asm operand %d probably doesn%'t match constraints",
907 /* We won't recognize either volatile memory or memory
908 with a queued address as available a memory_operand
909 at this point. Ignore it: clearly this *is* a memory. */
915 generating_concat_p
= old_generating_concat_p
;
916 ASM_OPERANDS_INPUT (body
, i
) = op
;
918 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body
, i
)
919 = gen_rtx_ASM_INPUT (TYPE_MODE (type
),
920 ggc_strdup (constraints
[i
+ noutputs
]));
922 if (tree_conflicts_with_clobbers_p (val
, &clobbered_regs
))
923 clobber_conflict_found
= 1;
926 /* Protect all the operands from the queue now that they have all been
929 generating_concat_p
= 0;
931 /* For in-out operands, copy output rtx to input rtx. */
932 for (i
= 0; i
< ninout
; i
++)
934 int j
= inout_opnum
[i
];
937 ASM_OPERANDS_INPUT (body
, ninputs
- ninout
+ i
)
940 sprintf (buffer
, "%d", j
);
941 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body
, ninputs
- ninout
+ i
)
942 = gen_rtx_ASM_INPUT (inout_mode
[i
], ggc_strdup (buffer
));
945 /* Copy labels to the vector. */
946 for (i
= 0, tail
= labels
; i
< nlabels
; ++i
, tail
= TREE_CHAIN (tail
))
947 ASM_OPERANDS_LABEL (body
, i
)
948 = gen_rtx_LABEL_REF (Pmode
, label_rtx (TREE_VALUE (tail
)));
950 generating_concat_p
= old_generating_concat_p
;
952 /* Now, for each output, construct an rtx
953 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
954 ARGVEC CONSTRAINTS OPNAMES))
955 If there is more than one, put them inside a PARALLEL. */
957 if (nlabels
> 0 && nclobbers
== 0)
959 gcc_assert (noutputs
== 0);
960 emit_jump_insn (body
);
962 else if (noutputs
== 0 && nclobbers
== 0)
964 /* No output operands: put in a raw ASM_OPERANDS rtx. */
967 else if (noutputs
== 1 && nclobbers
== 0)
969 ASM_OPERANDS_OUTPUT_CONSTRAINT (body
) = ggc_strdup (constraints
[0]);
970 emit_insn (gen_rtx_SET (VOIDmode
, output_rtx
[0], body
));
980 body
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (num
+ nclobbers
));
982 /* For each output operand, store a SET. */
983 for (i
= 0, tail
= outputs
; tail
; tail
= TREE_CHAIN (tail
), i
++)
986 = gen_rtx_SET (VOIDmode
,
989 (GET_MODE (output_rtx
[i
]),
990 ggc_strdup (TREE_STRING_POINTER (string
)),
991 ggc_strdup (constraints
[i
]),
992 i
, argvec
, constraintvec
, labelvec
, locus
));
994 MEM_VOLATILE_P (SET_SRC (XVECEXP (body
, 0, i
))) = vol
;
997 /* If there are no outputs (but there are some clobbers)
998 store the bare ASM_OPERANDS into the PARALLEL. */
1001 XVECEXP (body
, 0, i
++) = obody
;
1003 /* Store (clobber REG) for each clobbered register specified. */
1005 for (tail
= clobbers
; tail
; tail
= TREE_CHAIN (tail
))
1007 const char *regname
= TREE_STRING_POINTER (TREE_VALUE (tail
));
1009 int j
= decode_reg_name_and_count (regname
, &nregs
);
1014 if (j
== -3) /* `cc', which is not a register */
1017 if (j
== -4) /* `memory', don't cache memory across asm */
1019 XVECEXP (body
, 0, i
++)
1020 = gen_rtx_CLOBBER (VOIDmode
,
1023 gen_rtx_SCRATCH (VOIDmode
)));
1027 /* Ignore unknown register, error already signaled. */
1031 for (reg
= j
; reg
< j
+ nregs
; reg
++)
1033 /* Use QImode since that's guaranteed to clobber just
1035 clobbered_reg
= gen_rtx_REG (QImode
, reg
);
1037 /* Do sanity check for overlap between clobbers and
1038 respectively input and outputs that hasn't been
1039 handled. Such overlap should have been detected and
1041 if (!clobber_conflict_found
)
1045 /* We test the old body (obody) contents to avoid
1046 tripping over the under-construction body. */
1047 for (opno
= 0; opno
< noutputs
; opno
++)
1048 if (reg_overlap_mentioned_p (clobbered_reg
,
1051 ("asm clobber conflict with output operand");
1053 for (opno
= 0; opno
< ninputs
- ninout
; opno
++)
1054 if (reg_overlap_mentioned_p (clobbered_reg
,
1055 ASM_OPERANDS_INPUT (obody
,
1058 ("asm clobber conflict with input operand");
1061 XVECEXP (body
, 0, i
++)
1062 = gen_rtx_CLOBBER (VOIDmode
, clobbered_reg
);
1067 emit_jump_insn (body
);
1072 /* For any outputs that needed reloading into registers, spill them
1073 back to where they belong. */
1074 for (i
= 0; i
< noutputs
; ++i
)
1075 if (real_output_rtx
[i
])
1076 emit_move_insn (real_output_rtx
[i
], output_rtx
[i
]);
1078 crtl
->has_asm_statement
= 1;
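/* Illustrative example (not from the original source; names are made up)
   of the most general form handled by expand_asm_operands above, an
   extended asm with inputs, clobbers and labels:

     asm goto ("..."
               :                       no outputs allowed with goto
               : "r" (x)               inputs become ASM_OPERANDS_INPUTs
               : "memory", "cc"        clobbers become CLOBBER rtxes
               : error);               labels fill ASM_OPERANDS_LABEL

   A non-goto asm with outputs is instead emitted as one SET of the
   ASM_OPERANDS per output, wrapped in a PARALLEL together with the
   clobbers.  */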
1083 expand_asm_stmt (gimple stmt
)
1086 tree outputs
, tail
, t
;
1090 tree str
, out
, in
, cl
, labels
;
1091 location_t locus
= gimple_location (stmt
);
1093 /* Meh... convert the gimple asm operands into real tree lists.
1094 Eventually we should make all routines work on the vectors instead
1095 of relying on TREE_CHAIN. */
1097 n
= gimple_asm_noutputs (stmt
);
1100 t
= out
= gimple_asm_output_op (stmt
, 0);
1101 for (i
= 1; i
< n
; i
++)
1102 t
= TREE_CHAIN (t
) = gimple_asm_output_op (stmt
, i
);
1106 n
= gimple_asm_ninputs (stmt
);
1109 t
= in
= gimple_asm_input_op (stmt
, 0);
1110 for (i
= 1; i
< n
; i
++)
1111 t
= TREE_CHAIN (t
) = gimple_asm_input_op (stmt
, i
);
1115 n
= gimple_asm_nclobbers (stmt
);
1118 t
= cl
= gimple_asm_clobber_op (stmt
, 0);
1119 for (i
= 1; i
< n
; i
++)
1120 t
= TREE_CHAIN (t
) = gimple_asm_clobber_op (stmt
, i
);
1124 n
= gimple_asm_nlabels (stmt
);
1127 t
= labels
= gimple_asm_label_op (stmt
, 0);
1128 for (i
= 1; i
< n
; i
++)
1129 t
= TREE_CHAIN (t
) = gimple_asm_label_op (stmt
, i
);
1132 s
= gimple_asm_string (stmt
);
1133 str
= build_string (strlen (s
), s
);
1135 if (gimple_asm_input_p (stmt
))
1137 expand_asm_loc (str
, gimple_asm_volatile_p (stmt
), locus
);
1142 noutputs
= gimple_asm_noutputs (stmt
);
1143 /* o[I] is the place that output number I should be written. */
1144 o
= (tree
*) alloca (noutputs
* sizeof (tree
));
1146 /* Record the contents of OUTPUTS before it is modified. */
1147 for (i
= 0, tail
= outputs
; tail
; tail
= TREE_CHAIN (tail
), i
++)
1148 o
[i
] = TREE_VALUE (tail
);
1150 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1151 OUTPUTS some trees for where the values were actually stored. */
1152 expand_asm_operands (str
, outputs
, in
, cl
, labels
,
1153 gimple_asm_volatile_p (stmt
), locus
);
1155 /* Copy all the intermediate outputs into the specified outputs. */
1156 for (i
= 0, tail
= outputs
; tail
; tail
= TREE_CHAIN (tail
), i
++)
1158 if (o
[i
] != TREE_VALUE (tail
))
1160 expand_assignment (o
[i
], TREE_VALUE (tail
), false);
1163 /* Restore the original value so that it's correct the next
1164 time we expand this function. */
1165 TREE_VALUE (tail
) = o
[i
];
1170 /* A subroutine of expand_asm_operands. Check that all operands have
1171 the same number of alternatives. Return true if so. */
1174 check_operand_nalternatives (tree outputs
, tree inputs
)
1176 if (outputs
|| inputs
)
1178 tree tmp
= TREE_PURPOSE (outputs
? outputs
: inputs
);
1180 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp
)));
1183 if (nalternatives
+ 1 > MAX_RECOG_ALTERNATIVES
)
1185 error ("too many alternatives in %<asm%>");
1192 const char *constraint
1193 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp
)));
1195 if (n_occurrences (',', constraint
) != nalternatives
)
1197 error ("operand constraints for %<asm%> differ "
1198 "in number of alternatives");
1202 if (TREE_CHAIN (tmp
))
1203 tmp
= TREE_CHAIN (tmp
);
1205 tmp
= next
, next
= 0;
1212 /* A subroutine of expand_asm_operands. Check that all operand names
1213 are unique. Return true if so. We rely on the fact that these names
1214 are identifiers, and so have been canonicalized by get_identifier,
1215 so all we need are pointer comparisons. */
1218 check_unique_operand_names (tree outputs
, tree inputs
, tree labels
)
1220 tree i
, j
, i_name
= NULL_TREE
;
1222 for (i
= outputs
; i
; i
= TREE_CHAIN (i
))
1224 i_name
= TREE_PURPOSE (TREE_PURPOSE (i
));
1228 for (j
= TREE_CHAIN (i
); j
; j
= TREE_CHAIN (j
))
1229 if (simple_cst_equal (i_name
, TREE_PURPOSE (TREE_PURPOSE (j
))))
1233 for (i
= inputs
; i
; i
= TREE_CHAIN (i
))
1235 i_name
= TREE_PURPOSE (TREE_PURPOSE (i
));
1239 for (j
= TREE_CHAIN (i
); j
; j
= TREE_CHAIN (j
))
1240 if (simple_cst_equal (i_name
, TREE_PURPOSE (TREE_PURPOSE (j
))))
1242 for (j
= outputs
; j
; j
= TREE_CHAIN (j
))
1243 if (simple_cst_equal (i_name
, TREE_PURPOSE (TREE_PURPOSE (j
))))
1247 for (i
= labels
; i
; i
= TREE_CHAIN (i
))
1249 i_name
= TREE_PURPOSE (i
);
1253 for (j
= TREE_CHAIN (i
); j
; j
= TREE_CHAIN (j
))
1254 if (simple_cst_equal (i_name
, TREE_PURPOSE (j
)))
1256 for (j
= inputs
; j
; j
= TREE_CHAIN (j
))
1257 if (simple_cst_equal (i_name
, TREE_PURPOSE (TREE_PURPOSE (j
))))
1264 error ("duplicate asm operand name %qs", TREE_STRING_POINTER (i_name
));
1268 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1269 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1270 STRING and in the constraints to those numbers. */
1273 resolve_asm_operand_names (tree string
, tree outputs
, tree inputs
, tree labels
)
1280 check_unique_operand_names (outputs
, inputs
, labels
);
1282 /* Substitute [<name>] in input constraint strings. There should be no
1283 named operands in output constraints. */
1284 for (t
= inputs
; t
; t
= TREE_CHAIN (t
))
1286 c
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
1287 if (strchr (c
, '[') != NULL
)
1289 p
= buffer
= xstrdup (c
);
1290 while ((p
= strchr (p
, '[')) != NULL
)
1291 p
= resolve_operand_name_1 (p
, outputs
, inputs
, NULL
);
1292 TREE_VALUE (TREE_PURPOSE (t
))
1293 = build_string (strlen (buffer
), buffer
);
1298 /* Now check for any needed substitutions in the template. */
1299 c
= TREE_STRING_POINTER (string
);
1300 while ((c
= strchr (c
, '%')) != NULL
)
1304 else if (ISALPHA (c
[1]) && c
[2] == '[')
1308 c
+= 1 + (c
[1] == '%');
1315 /* OK, we need to make a copy so we can perform the substitutions.
1316 Assume that we will not need extra space--we get to remove '['
1317 and ']', which means we cannot have a problem until we have more
1318 than 999 operands. */
1319 buffer
= xstrdup (TREE_STRING_POINTER (string
));
1320 p
= buffer
+ (c
- TREE_STRING_POINTER (string
));
1322 while ((p
= strchr (p
, '%')) != NULL
)
1326 else if (ISALPHA (p
[1]) && p
[2] == '[')
1330 p
+= 1 + (p
[1] == '%');
1334 p
= resolve_operand_name_1 (p
, outputs
, inputs
, labels
);
1337 string
= build_string (strlen (buffer
), buffer
);
1344 /* A subroutine of resolve_operand_names. P points to the '[' for a
1345 potential named operand of the form [<name>]. In place, replace
1346 the name and brackets with a number. Return a pointer to the
1347 balance of the string after substitution. */
1350 resolve_operand_name_1 (char *p
, tree outputs
, tree inputs
, tree labels
)
1356 /* Collect the operand name. */
1357 q
= strchr (++p
, ']');
1360 error ("missing close brace for named operand");
1361 return strchr (p
, '\0');
1365 /* Resolve the name to a number. */
1366 for (op
= 0, t
= outputs
; t
; t
= TREE_CHAIN (t
), op
++)
1368 tree name
= TREE_PURPOSE (TREE_PURPOSE (t
));
1369 if (name
&& strcmp (TREE_STRING_POINTER (name
), p
) == 0)
1372 for (t
= inputs
; t
; t
= TREE_CHAIN (t
), op
++)
1374 tree name
= TREE_PURPOSE (TREE_PURPOSE (t
));
1375 if (name
&& strcmp (TREE_STRING_POINTER (name
), p
) == 0)
1378 for (t
= labels
; t
; t
= TREE_CHAIN (t
), op
++)
1380 tree name
= TREE_PURPOSE (t
);
1381 if (name
&& strcmp (TREE_STRING_POINTER (name
), p
) == 0)
1385 error ("undefined named operand %qs", identifier_to_locale (p
));
1389 /* Replace the name with the number. Unfortunately, not all libraries
1390 get the return value of sprintf correct, so search for the end of the
1391 generated string by hand. */
1392 sprintf (--p
, "%d", op
);
1393 p
= strchr (p
, '\0');
1395 /* Verify the no extra buffer space assumption. */
1396 gcc_assert (p
<= q
);
1398 /* Shift the rest of the buffer down to fill the gap. */
1399 memmove (p
, q
+ 1, strlen (q
+ 1) + 1);
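/* Illustrative example (not from the original source; the operand names
   are made up) of the named-operand syntax resolved by the two functions
   above:

     asm ("mov %[src], %[dst]"
          : [dst] "=r" (d)
          : [src] "r" (s));

   Each "%[name]" in the template and each "[name]" in a constraint is
   rewritten in place to the corresponding operand number, here "%1" and
   "%0".  */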
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}

/* Generate RTL to return directly from the current function.
   (That is, we bypass any return value.)  */

void
expand_naked_return (void)
{
  rtx end_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  end_label = naked_return_label;
  if (end_label == 0)
    end_label = naked_return_label = gen_label_rtx ();

  emit_jump (end_label);
}
1436 /* Generate RTL to return from the current function, with value VAL. */
1439 expand_value_return (rtx val
)
1441 /* Copy the value to the return location unless it's already there. */
1443 tree decl
= DECL_RESULT (current_function_decl
);
1444 rtx return_reg
= DECL_RTL (decl
);
1445 if (return_reg
!= val
)
1447 tree funtype
= TREE_TYPE (current_function_decl
);
1448 tree type
= TREE_TYPE (decl
);
1449 int unsignedp
= TYPE_UNSIGNED (type
);
1450 enum machine_mode old_mode
= DECL_MODE (decl
);
1451 enum machine_mode mode
;
1452 if (DECL_BY_REFERENCE (decl
))
1453 mode
= promote_function_mode (type
, old_mode
, &unsignedp
, funtype
, 2);
1455 mode
= promote_function_mode (type
, old_mode
, &unsignedp
, funtype
, 1);
1457 if (mode
!= old_mode
)
1458 val
= convert_modes (mode
, old_mode
, val
, unsignedp
);
1460 if (GET_CODE (return_reg
) == PARALLEL
)
1461 emit_group_load (return_reg
, val
, type
, int_size_in_bytes (type
));
1463 emit_move_insn (return_reg
, val
);
1466 expand_null_return_1 ();
/* Output a return with no value.  */

static void
expand_null_return_1 (void)
{
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}
1479 /* Generate RTL to evaluate the expression RETVAL and return it
1480 from the current function. */
1483 expand_return (tree retval
)
1489 /* If function wants no value, give it none. */
1490 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl
))) == VOID_TYPE
)
1492 expand_normal (retval
);
1493 expand_null_return ();
1497 if (retval
== error_mark_node
)
1499 /* Treat this like a return of no value from a function that
1501 expand_null_return ();
1504 else if ((TREE_CODE (retval
) == MODIFY_EXPR
1505 || TREE_CODE (retval
) == INIT_EXPR
)
1506 && TREE_CODE (TREE_OPERAND (retval
, 0)) == RESULT_DECL
)
1507 retval_rhs
= TREE_OPERAND (retval
, 1);
1509 retval_rhs
= retval
;
1511 result_rtl
= DECL_RTL (DECL_RESULT (current_function_decl
));
1513 /* If we are returning the RESULT_DECL, then the value has already
1514 been stored into it, so we don't have to do anything special. */
1515 if (TREE_CODE (retval_rhs
) == RESULT_DECL
)
1516 expand_value_return (result_rtl
);
1518 /* If the result is an aggregate that is being returned in one (or more)
1519 registers, load the registers here. */
1521 else if (retval_rhs
!= 0
1522 && TYPE_MODE (TREE_TYPE (retval_rhs
)) == BLKmode
1523 && REG_P (result_rtl
))
1525 val
= copy_blkmode_to_reg (GET_MODE (result_rtl
), retval_rhs
);
1528 /* Use the mode of the result value on the return register. */
1529 PUT_MODE (result_rtl
, GET_MODE (val
));
1530 expand_value_return (val
);
1533 expand_null_return ();
1535 else if (retval_rhs
!= 0
1536 && !VOID_TYPE_P (TREE_TYPE (retval_rhs
))
1537 && (REG_P (result_rtl
)
1538 || (GET_CODE (result_rtl
) == PARALLEL
)))
1540 /* Calculate the return value into a temporary (usually a pseudo
1542 tree ot
= TREE_TYPE (DECL_RESULT (current_function_decl
));
1543 tree nt
= build_qualified_type (ot
, TYPE_QUALS (ot
) | TYPE_QUAL_CONST
);
1545 val
= assign_temp (nt
, 0, 1);
1546 val
= expand_expr (retval_rhs
, val
, GET_MODE (val
), EXPAND_NORMAL
);
1547 val
= force_not_mem (val
);
1548 /* Return the calculated value. */
1549 expand_value_return (val
);
1553 /* No hard reg used; calculate value into hard return reg. */
1554 expand_expr (retval
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
1555 expand_value_return (result_rtl
);
1559 /* Emit code to restore vital registers at the beginning of a nonlocal goto
1562 expand_nl_goto_receiver (void)
1566 /* Clobber the FP when we get here, so we have to make sure it's
1567 marked as used by this function. */
1568 emit_use (hard_frame_pointer_rtx
);
1570 /* Mark the static chain as clobbered here so life information
1571 doesn't get messed up for it. */
1572 chain
= targetm
.calls
.static_chain (current_function_decl
, true);
1573 if (chain
&& REG_P (chain
))
1574 emit_clobber (chain
);
1576 #ifdef HAVE_nonlocal_goto
1577 if (! HAVE_nonlocal_goto
)
1579 /* First adjust our frame pointer to its actual value. It was
1580 previously set to the start of the virtual area corresponding to
1581 the stacked variables when we branched here and now needs to be
1582 adjusted to the actual hardware fp value.
1584 Assignments are to virtual registers are converted by
1585 instantiate_virtual_regs into the corresponding assignment
1586 to the underlying register (fp in this case) that makes
1587 the original assignment true.
1588 So the following insn will actually be
1589 decrementing fp by STARTING_FRAME_OFFSET. */
1590 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
1592 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
1593 if (fixed_regs
[ARG_POINTER_REGNUM
])
1595 #ifdef ELIMINABLE_REGS
1596 /* If the argument pointer can be eliminated in favor of the
1597 frame pointer, we don't need to restore it. We assume here
1598 that if such an elimination is present, it can always be used.
1599 This is the case on all known machines; if we don't make this
1600 assumption, we do unnecessary saving on many machines. */
1601 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
1604 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
1605 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
1606 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
1609 if (i
== ARRAY_SIZE (elim_regs
))
1612 /* Now restore our arg pointer from the address at which it
1613 was saved in our stack frame. */
1614 emit_move_insn (crtl
->args
.internal_arg_pointer
,
1615 copy_to_reg (get_arg_pointer_save_area ()));
1620 #ifdef HAVE_nonlocal_goto_receiver
1621 if (HAVE_nonlocal_goto_receiver
)
1622 emit_insn (gen_nonlocal_goto_receiver ());
1625 /* We must not allow the code we just generated to be reordered by
1626 scheduling. Specifically, the update of the frame pointer must
1627 happen immediately, not later. */
1628 emit_insn (gen_blockage ());
/* Emit code to save the current value of stack.  */

rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  do_pending_stack_adjust ();
  emit_stack_save (SAVE_BLOCK, &ret);

  return ret;
}
/* Emit code to restore the current value of stack.  */

void
expand_stack_restore (tree var)
{
  rtx prev, sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);
  fixup_args_size_notes (prev, get_last_insn (), 0);
}
/* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE.  PROB
   is the probability of jumping to LABEL.  */

static void
do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
                  int unsignedp, int prob)
{
  gcc_assert (prob <= REG_BR_PROB_BASE);
  do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
                           NULL_RTX, NULL_RTX, label, prob);
}
1666 /* Do the insertion of a case label into case_list. The labels are
1667 fed to us in descending order from the sorted vector of case labels used
1668 in the tree part of the middle end. So the list we construct is
1669 sorted in ascending order.
1671 LABEL is the case label to be inserted. LOW and HIGH are the bounds
1672 against which the index is compared to jump to LABEL and PROB is the
1673 estimated probability LABEL is reached from the switch statement. */
1675 static struct case_node
*
1676 add_case_node (struct case_node
*head
, tree low
, tree high
,
1677 tree label
, int prob
, alloc_pool case_node_pool
)
1679 struct case_node
*r
;
1681 gcc_checking_assert (low
);
1682 gcc_checking_assert (high
&& (TREE_TYPE (low
) == TREE_TYPE (high
)));
1684 /* Add this label to the chain. */
1685 r
= (struct case_node
*) pool_alloc (case_node_pool
);
1688 r
->code_label
= label
;
1689 r
->parent
= r
->left
= NULL
;
1691 r
->subtree_prob
= prob
;
1696 /* Dump ROOT, a list or tree of case nodes, to file. */
1699 dump_case_nodes (FILE *f
, struct case_node
*root
,
1700 int indent_step
, int indent_level
)
1702 HOST_WIDE_INT low
, high
;
1708 dump_case_nodes (f
, root
->left
, indent_step
, indent_level
);
1710 low
= tree_low_cst (root
->low
, 0);
1711 high
= tree_low_cst (root
->high
, 0);
1715 fprintf(f
, "%*s" HOST_WIDE_INT_PRINT_DEC
,
1716 indent_step
* indent_level
, "", low
);
1718 fprintf(f
, "%*s" HOST_WIDE_INT_PRINT_DEC
" ... " HOST_WIDE_INT_PRINT_DEC
,
1719 indent_step
* indent_level
, "", low
, high
);
1722 dump_case_nodes (f
, root
->right
, indent_step
, indent_level
);
#ifndef HAVE_casesi
#define HAVE_casesi 0
#endif

#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#endif

/* Return the smallest number of different values for which it is best to use a
   jump-table instead of a tree of conditional branches.  */

static unsigned int
case_values_threshold (void)
{
  unsigned int threshold = PARAM_VALUE (PARAM_CASE_VALUES_THRESHOLD);

  if (threshold == 0)
    threshold = targetm.case_values_threshold ();

  return threshold;
}
1747 /* Return true if a switch should be expanded as a decision tree.
1748 RANGE is the difference between highest and lowest case.
1749 UNIQ is number of unique case node targets, not counting the default case.
1750 COUNT is the number of comparisons needed, not counting the default case. */
1753 expand_switch_as_decision_tree_p (tree range
,
1754 unsigned int uniq ATTRIBUTE_UNUSED
,
1759 /* If neither casesi or tablejump is available, or flag_jump_tables
1760 over-ruled us, we really have no choice. */
1761 if (!HAVE_casesi
&& !HAVE_tablejump
)
1763 if (!flag_jump_tables
)
1766 /* If the switch is relatively small such that the cost of one
1767 indirect jump on the target are higher than the cost of a
1768 decision tree, go with the decision tree.
1770 If range of values is much bigger than number of values,
1771 or if it is too large to represent in a HOST_WIDE_INT,
1772 make a sequence of conditional branches instead of a dispatch.
1774 The definition of "much bigger" depends on whether we are
1775 optimizing for size or for speed. If the former, the maximum
1776 ratio range/count = 3, because this was found to be the optimal
1777 ratio for size on i686-pc-linux-gnu, see PR11823. The ratio
1778 10 is much older, and was probably selected after an extensive
1779 benchmarking investigation on numerous platforms. Or maybe it
1780 just made sense to someone at some point in the history of GCC,
1782 max_ratio
= optimize_insn_for_size_p () ? 3 : 10;
1783 if (count
< case_values_threshold ()
1784 || ! host_integerp (range
, /*pos=*/1)
1785 || compare_tree_int (range
, max_ratio
* count
) > 0)
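/* Worked example (not from the original source; the numbers are made up)
   of the heuristic above: with the speed-optimizing ratio of 10, a switch
   needing count = 3 comparisons spread over range = 5000 values gives
   5000 > 10 * 3, so a decision tree is chosen; ten contiguous case values
   give range = 9 <= 10 * 10 and, provided count reaches
   case_values_threshold (), a jump table is used instead.  */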
1791 /* Generate a decision tree, switching on INDEX_EXPR and jumping to
1792 one of the labels in CASE_LIST or to the DEFAULT_LABEL.
1793 DEFAULT_PROB is the estimated probability that it jumps to
1796 We generate a binary decision tree to select the appropriate target
1797 code. This is done as follows:
1799 If the index is a short or char that we do not have
1800 an insn to handle comparisons directly, convert it to
1801 a full integer now, rather than letting each comparison
1802 generate the conversion.
1804 Load the index into a register.
1806 The list of cases is rearranged into a binary tree,
1807 nearly optimal assuming equal probability for each case.
1809 The tree is transformed into RTL, eliminating redundant
1810 test conditions at the same time.
1812 If program flow could reach the end of the decision tree
1813 an unconditional jump to the default code is emitted.
1815 The above process is unaware of the CFG. The caller has to fix up
1816 the CFG itself. This is done in cfgexpand.c. */
1819 emit_case_decision_tree (tree index_expr
, tree index_type
,
1820 struct case_node
*case_list
, rtx default_label
,
1823 rtx index
= expand_normal (index_expr
);
1825 if (GET_MODE_CLASS (GET_MODE (index
)) == MODE_INT
1826 && ! have_insn_for (COMPARE
, GET_MODE (index
)))
1828 int unsignedp
= TYPE_UNSIGNED (index_type
);
1829 enum machine_mode wider_mode
;
1830 for (wider_mode
= GET_MODE (index
); wider_mode
!= VOIDmode
;
1831 wider_mode
= GET_MODE_WIDER_MODE (wider_mode
))
1832 if (have_insn_for (COMPARE
, wider_mode
))
1834 index
= convert_to_mode (wider_mode
, index
, unsignedp
);
1839 do_pending_stack_adjust ();
1843 index
= copy_to_reg (index
);
1844 if (TREE_CODE (index_expr
) == SSA_NAME
)
1845 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (index_expr
), index
);
1848 balance_case_nodes (&case_list
, NULL
);
1850 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1852 int indent_step
= ceil_log2 (TYPE_PRECISION (index_type
)) + 2;
1853 fprintf (dump_file
, ";; Expanding GIMPLE switch as decision tree:\n");
1854 dump_case_nodes (dump_file
, case_list
, indent_step
, 0);
1857 emit_case_nodes (index
, case_list
, default_label
, default_prob
, index_type
);
1859 emit_jump (default_label
);
/* Return the sum of probabilities of outgoing edges of basic block BB.  */

static int
get_outgoing_edge_probs (basic_block bb)
{
  edge e;
  edge_iterator ei;
  int prob_sum = 0;

  if (!bb)
    return 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    prob_sum += e->probability;
  return prob_sum;
}
/* Computes the conditional probability of jumping to a target if the branch
   instruction is executed.
   TARGET_PROB is the estimated probability of jumping to a target relative
   to some basic block BB.
   BASE_PROB is the probability of reaching the branch instruction relative
   to the same basic block BB.  */

static inline int
conditional_probability (int target_prob, int base_prob)
{
  if (base_prob > 0)
    {
      gcc_assert (target_prob >= 0);
      gcc_assert (target_prob <= base_prob);
      return RDIV (target_prob * REG_BR_PROB_BASE, base_prob);
    }
  return -1;
}
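/* Worked example (not from the original source): with REG_BR_PROB_BASE
   == 10000, a target probability of 2000 out of a base probability of
   8000 yields RDIV (2000 * 10000, 8000) == 2500, i.e. a 25% conditional
   probability for the emitted branch.  */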
1896 /* Generate a dispatch tabler, switching on INDEX_EXPR and jumping to
1897 one of the labels in CASE_LIST or to the DEFAULT_LABEL.
1898 MINVAL, MAXVAL, and RANGE are the extrema and range of the case
1899 labels in CASE_LIST. STMT_BB is the basic block containing the statement.
1901 First, a jump insn is emitted. First we try "casesi". If that
1902 fails, try "tablejump". A target *must* have one of them (or both).
1904 Then, a table with the target labels is emitted.
1906 The process is unaware of the CFG. The caller has to fix up
1907 the CFG itself. This is done in cfgexpand.c. */
1910 emit_case_dispatch_table (tree index_expr
, tree index_type
,
1911 struct case_node
*case_list
, rtx default_label
,
1912 tree minval
, tree maxval
, tree range
,
1913 basic_block stmt_bb
)
1916 struct case_node
*n
;
1918 rtx fallback_label
= label_rtx (case_list
->code_label
);
1919 rtx table_label
= gen_label_rtx ();
1920 bool has_gaps
= false;
1921 edge default_edge
= stmt_bb
? EDGE_SUCC(stmt_bb
, 0) : NULL
;
1922 int default_prob
= default_edge
? default_edge
->probability
: 0;
1923 int base
= get_outgoing_edge_probs (stmt_bb
);
1924 bool try_with_tablejump
= false;
1926 int new_default_prob
= conditional_probability (default_prob
,
1929 if (! try_casesi (index_type
, index_expr
, minval
, range
,
1930 table_label
, default_label
, fallback_label
,
1933 /* Index jumptables from zero for suitable values of minval to avoid
1934 a subtraction. For the rationale see:
1935 "http://gcc.gnu.org/ml/gcc-patches/2001-10/msg01234.html". */
1936 if (optimize_insn_for_speed_p ()
1937 && compare_tree_int (minval
, 0) > 0
1938 && compare_tree_int (minval
, 3) < 0)
1940 minval
= build_int_cst (index_type
, 0);
1944 try_with_tablejump
= true;
1947 /* Get table of labels to jump to, in order of case index. */
1949 ncases
= tree_low_cst (range
, 0) + 1;
1950 labelvec
= XALLOCAVEC (rtx
, ncases
);
1951 memset (labelvec
, 0, ncases
* sizeof (rtx
));
1953 for (n
= case_list
; n
; n
= n
->right
)
1955 /* Compute the low and high bounds relative to the minimum
1956 value since that should fit in a HOST_WIDE_INT while the
1957 actual values may not. */
1959 = tree_low_cst (fold_build2 (MINUS_EXPR
, index_type
,
1960 n
->low
, minval
), 1);
1961 HOST_WIDE_INT i_high
1962 = tree_low_cst (fold_build2 (MINUS_EXPR
, index_type
,
1963 n
->high
, minval
), 1);
1966 for (i
= i_low
; i
<= i_high
; i
++)
1968 = gen_rtx_LABEL_REF (Pmode
, label_rtx (n
->code_label
));
1971 /* Fill in the gaps with the default. We may have gaps at
1972 the beginning if we tried to avoid the minval subtraction,
1973 so substitute some label even if the default label was
1974 deemed unreachable. */
1976 default_label
= fallback_label
;
1977 for (i
= 0; i
< ncases
; i
++)
1978 if (labelvec
[i
] == 0)
1981 labelvec
[i
] = gen_rtx_LABEL_REF (Pmode
, default_label
);
1986 /* There is at least one entry in the jump table that jumps
1987 to default label. The default label can either be reached
1988 through the indirect jump or the direct conditional jump
1989 before that. Split the probability of reaching the
1990 default label among these two jumps. */
1991 new_default_prob
= conditional_probability (default_prob
/2,
1994 base
-= default_prob
;
1998 base
-= default_prob
;
2003 default_edge
->probability
= default_prob
;
2005 /* We have altered the probability of the default edge. So the probabilities
2006 of all other edges need to be adjusted so that it sums up to
2007 REG_BR_PROB_BASE. */
2012 FOR_EACH_EDGE (e
, ei
, stmt_bb
->succs
)
2013 e
->probability
= RDIV (e
->probability
* REG_BR_PROB_BASE
, base
);
2016 if (try_with_tablejump
)
2018 bool ok
= try_tablejump (index_type
, index_expr
, minval
, range
,
2019 table_label
, default_label
, new_default_prob
);
2022 /* Output the table. */
2023 emit_label (table_label
);
2025 if (CASE_VECTOR_PC_RELATIVE
|| flag_pic
)
2026 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE
,
2027 gen_rtx_LABEL_REF (Pmode
, table_label
),
2028 gen_rtvec_v (ncases
, labelvec
),
2029 const0_rtx
, const0_rtx
));
2031 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE
,
2032 gen_rtvec_v (ncases
, labelvec
)));
2034 /* Record no drop-through after the table. */
/* Reset the aux field of all outgoing edges of basic block BB.  */

static inline void
reset_out_edges_aux (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    e->aux = (void *) 0;
}
/* Compute the number of case labels that correspond to each outgoing edge of
   STMT.  Record this information in the aux field of the edge.  */

static inline void
compute_cases_per_edge (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  reset_out_edges_aux (bb);
  int ncases = gimple_switch_num_labels (stmt);
  for (int i = ncases - 1; i >= 1; --i)
    {
      tree elt = gimple_switch_label (stmt, i);
      tree lab = CASE_LABEL (elt);
      basic_block case_bb = label_to_block_fn (cfun, lab);
      edge case_edge = find_edge (bb, case_bb);
      case_edge->aux = (void *)((intptr_t)(case_edge->aux) + 1);
    }
}
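/* Illustrative example (not from the original source; names made up): for

     switch (x) { case 1: case 2: foo (); break; case 3: bar (); break; }

   the edge to the block for foo () ends up with aux == 2 and the edge for
   bar () with aux == 1; add_case_node later divides each edge's
   probability by this count so the per-label probabilities sum up
   correctly.  */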
2068 /* Terminate a case (Pascal/Ada) or switch (C) statement
2069 in which ORIG_INDEX is the expression to be tested.
2070 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
2071 type as given in the source before any compiler conversions.
2072 Generate the code to test it and jump to the right place. */
2075 expand_case (gimple stmt
)
2077 tree minval
= NULL_TREE
, maxval
= NULL_TREE
, range
= NULL_TREE
;
2078 rtx default_label
= NULL_RTX
;
2079 unsigned int count
, uniq
;
2081 int ncases
= gimple_switch_num_labels (stmt
);
2082 tree index_expr
= gimple_switch_index (stmt
);
2083 tree index_type
= TREE_TYPE (index_expr
);
2085 basic_block bb
= gimple_bb (stmt
);
2087 /* A list of case labels; it is first built as a list and it may then
2088 be rearranged into a nearly balanced binary tree. */
2089 struct case_node
*case_list
= 0;
2091 /* A pool for case nodes. */
2092 alloc_pool case_node_pool
;
2094 /* An ERROR_MARK occurs for various reasons including invalid data type.
2095 ??? Can this still happen, with GIMPLE and all? */
2096 if (index_type
== error_mark_node
)
2099 /* cleanup_tree_cfg removes all SWITCH_EXPR with their index
2100 expressions being INTEGER_CST. */
2101 gcc_assert (TREE_CODE (index_expr
) != INTEGER_CST
);
2103 case_node_pool
= create_alloc_pool ("struct case_node pool",
2104 sizeof (struct case_node
),
2107 do_pending_stack_adjust ();
2109 /* Find the default case target label. */
2110 default_label
= label_rtx (CASE_LABEL (gimple_switch_default_label (stmt
)));
2111 edge default_edge
= EDGE_SUCC(bb
, 0);
2112 int default_prob
= default_edge
->probability
;
2114 /* Get upper and lower bounds of case values. */
2115 elt
= gimple_switch_label (stmt
, 1);
2116 minval
= fold_convert (index_type
, CASE_LOW (elt
));
2117 elt
= gimple_switch_label (stmt
, ncases
- 1);
2118 if (CASE_HIGH (elt
))
2119 maxval
= fold_convert (index_type
, CASE_HIGH (elt
));
2121 maxval
= fold_convert (index_type
, CASE_LOW (elt
));
2123 /* Compute span of values. */
2124 range
= fold_build2 (MINUS_EXPR
, index_type
, maxval
, minval
);
  /* Listify the labels queue and gather some numbers to decide
     how to expand this switch ().  */
  uniq = 0;
  count = 0;
  struct pointer_set_t *seen_labels = pointer_set_create ();
  compute_cases_per_edge (stmt);

  for (i = ncases - 1; i >= 1; --i)
    {
      elt = gimple_switch_label (stmt, i);
      tree low = CASE_LOW (elt);
      tree high = CASE_HIGH (elt);
      gcc_assert (! high || tree_int_cst_lt (low, high));
      tree lab = CASE_LABEL (elt);

      /* Count the elements.
         A range counts double, since it requires two compares.  */
      count++;
      if (high)
        count++;

      /* If we have not seen this label yet, then increase the
         number of unique case node targets seen.  */
      if (!pointer_set_insert (seen_labels, lab))
        uniq++;

      /* The bounds on the case range, LOW and HIGH, have to be converted
         to the case's index type.  Note that the original type of the
         case index in the source code is usually "lost" during
         gimplification due to type promotion, but the case labels retain the
         original type.  Make sure to drop overflow flags.  */
      low = fold_convert (index_type, low);
      if (TREE_OVERFLOW (low))
        low = build_int_cst_wide (index_type,
                                  TREE_INT_CST_LOW (low),
                                  TREE_INT_CST_HIGH (low));

      /* The canonical form of a case label in GIMPLE is that a simple case
         has an empty CASE_HIGH.  For the casesi and tablejump expanders,
         the back ends want simple cases to have high == low.  */
      if (high == NULL_TREE)
        high = low;
      else
        {
          high = fold_convert (index_type, high);
          if (TREE_OVERFLOW (high))
            high = build_int_cst_wide (index_type,
                                       TREE_INT_CST_LOW (high),
                                       TREE_INT_CST_HIGH (high));
        }

      basic_block case_bb = label_to_block_fn (cfun, lab);
      edge case_edge = find_edge (bb, case_bb);
      case_list = add_case_node (
          case_list, low, high, lab,
          case_edge->probability / (intptr_t) (case_edge->aux),
          case_node_pool);
    }
  pointer_set_destroy (seen_labels);
  reset_out_edges_aux (bb);

  /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
     destination, such as one with a default case only.
     It also removes cases that are out of range for the switch
     type, so we should never get a zero here.  */
  gcc_assert (count > 0);

  rtx before_case = get_last_insn ();

  /* Decide how to expand this switch.
     The two options at this point are a dispatch table (casesi or
     tablejump) or a decision tree.  */

  if (expand_switch_as_decision_tree_p (range, uniq, count))
    emit_case_decision_tree (index_expr, index_type,
                             case_list, default_label,
                             default_prob);
  else
    emit_case_dispatch_table (index_expr, index_type,
                              case_list, default_label,
                              minval, maxval, range, bb);

  reorder_insns (NEXT_INSN (before_case), get_last_insn (), before_case);

  free_temp_slots ();
  free_alloc_pool (case_node_pool);
}
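/* Illustrative note (the heuristic itself is expand_switch_as_decision_tree_p,
   defined earlier in this file): the choice above is driven only by the
   summary numbers RANGE, UNIQ and COUNT gathered in expand_case.  A sparse
   switch such as

     switch (x) { case 1: f (); break; case 1000: g (); break;
                  case 1000000: h (); break; }

   has COUNT == 3 and UNIQ == 3 but a RANGE of 999999, so a branch table
   would consist almost entirely of default entries and a decision tree is
   emitted instead; a dense switch over 0..7 with distinct targets takes the
   dispatch-table path.  */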
/* Expand the dispatch to a short decrement chain if there are few cases
   to dispatch to.  Likewise if neither casesi nor tablejump is available,
   or if flag_jump_tables is not set.  Otherwise, expand as a casesi or a
   tablejump.  The index mode is always the mode of integer_type_node.
   Trap if no case matches the index.

   DISPATCH_INDEX is the index expression to switch on.  It should be a
   memory or register operand.

   DISPATCH_TABLE is a set of case labels.  The set should be sorted in
   ascending order, be contiguous, starting with value 0, and contain only
   single-valued case labels.  */

void
expand_sjlj_dispatch_table (rtx dispatch_index,
                            vec<tree> dispatch_table)
{
  tree index_type = integer_type_node;
  enum machine_mode index_mode = TYPE_MODE (index_type);

  int ncases = dispatch_table.length ();

  do_pending_stack_adjust ();
  rtx before_case = get_last_insn ();

  /* Expand as a decrement-chain if there are 5 or fewer dispatch
     labels.  This covers more than 98% of the cases in libjava,
     and seems to be a reasonable compromise between the "old way"
     of expanding as a decision tree or dispatch table vs. the "new
     way" with decrement chain or dispatch table.  */
  if (dispatch_table.length () <= 5
      || (!HAVE_casesi && !HAVE_tablejump)
      || !flag_jump_tables)
    {
      /* Expand the dispatch as a decrement chain:

         "switch (index) {case 0: do_0; case 1: do_1; ...; case N: do_N;}"

         ==>

         if (index == 0) do_0; else index--;
         if (index == 0) do_1; else index--;
         ...
         if (index == 0) do_N; else index--;

         This is more efficient than a dispatch table on most machines.
         The last "index--" is redundant but the code is trivially dead
         and will be cleaned up by later passes.  */
      rtx index = copy_to_mode_reg (index_mode, dispatch_index);
      rtx zero = CONST0_RTX (index_mode);
      for (int i = 0; i < ncases; i++)
        {
          tree elt = dispatch_table[i];
          rtx lab = label_rtx (CASE_LABEL (elt));
          do_jump_if_equal (index_mode, index, zero, lab, 0, -1);
          force_expand_binop (index_mode, sub_optab,
                              index, CONST1_RTX (index_mode),
                              index, 0, OPTAB_DIRECT);
        }
    }
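  /* Hedged sketch of what the loop above emits for three dispatch labels
     L0, L1 and L2 (invented names), written as C-like pseudo code:

       if (index == 0) goto L0;  index -= 1;
       if (index == 0) goto L1;  index -= 1;
       if (index == 0) goto L2;  index -= 1;   // last decrement is dead code

     Anything that falls off the end of the chain reaches the
     expand_builtin_trap () call emitted at the bottom of this function.  */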
  else
    {
      /* Similar to expand_case, but much simpler.  */
      struct case_node *case_list = 0;
      alloc_pool case_node_pool = create_alloc_pool ("struct sjlj_case pool",
                                                     sizeof (struct case_node),
                                                     ncases);
      tree index_expr = make_tree (index_type, dispatch_index);
      tree minval = build_int_cst (index_type, 0);
      tree maxval = CASE_LOW (dispatch_table.last ());
      tree range = maxval;
      rtx default_label = gen_label_rtx ();

      for (int i = ncases - 1; i >= 0; --i)
        {
          tree elt = dispatch_table[i];
          tree low = CASE_LOW (elt);
          tree lab = CASE_LABEL (elt);
          case_list = add_case_node (case_list, low, low, lab, 0,
                                     case_node_pool);
        }

      emit_case_dispatch_table (index_expr, index_type,
                                case_list, default_label,
                                minval, maxval, range,
                                BLOCK_FOR_INSN (before_case));
      emit_label (default_label);
      free_alloc_pool (case_node_pool);
    }

  /* Dispatching something not handled?  Trap!  */
  expand_builtin_trap ();

  reorder_insns (NEXT_INSN (before_case), get_last_insn (), before_case);

  free_temp_slots ();
}
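/* Usage note, restating the contract documented above: the caller passes a
   dispatch_table whose labels carry the consecutive values 0, 1, ..., N-1
   with single-valued CASE_LOWs, so MINVAL is always 0 and RANGE == MAXVAL.
   Whichever expansion is chosen, an index that matches no label ends up at
   the trap emitted just before the insns are reordered.  */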
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
        {
          if (!tree_int_cst_equal (np->low, np->high))
            ranges++;

          i++;
          np = np->right;
        }

      if (i > 2)
        {
          /* Split this list if it is long enough for that to help.  */
          npp = head;
          left = *npp;

          /* If there are just three nodes, split at the middle one.  */
          if (i == 3)
            npp = &(*npp)->right;
          else
            {
              /* Find the place in the list that bisects the list's total cost,
                 where ranges count as 2.
                 Here I gets half the total cost.  */
              i = (i + ranges + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i--;
                  i--;
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                }
            }
          *head = np = *npp;
          *npp = 0;
          np->parent = parent;
          np->left = left;

          /* Optimize each of the two split parts.  */
          balance_case_nodes (&np->left, np);
          balance_case_nodes (&np->right, np);
          np->subtree_prob = np->prob;
          np->subtree_prob += np->left->subtree_prob;
          np->subtree_prob += np->right->subtree_prob;
        }
      else
        {
          /* Else leave this branch as one level,
             but fill in `parent' fields.  */
          np = *head;
          np->parent = parent;
          np->subtree_prob = np->prob;
          for (; np->right; np = np->right)
            {
              np->right->parent = np;
              (*head)->subtree_prob += np->right->subtree_prob;
            }
        }
    }
}
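/* Worked example (illustrative): balancing the sorted chain
   1 -> 3 -> 5 -> 7 -> 9 (five single-valued nodes, so i == 5 and
   ranges == 0) computes i = (5 + 0 + 1) / 2 == 3 and walks until that cost
   is consumed, which stops at the node for 5.  That node becomes the new
   head, with { 1 -> 3 } attached as its left branch and { 7 -> 9 } as its
   right branch; each side is then balanced recursively and subtree_prob is
   summed bottom-up.  */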
/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */

static bool
node_has_low_bound (case_node_ptr node, tree index_type)
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return true;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return false;

  low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
                               node->low,
                               build_int_cst (TREE_TYPE (node->low), 1));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return false;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return true;

  return false;
}
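/* Illustrative case: if NODE covers the single value 5, has no left branch,
   and some parent already tested up to 4 (pnode->high == low - 1), then on
   the path that reaches NODE the index is already known to be >= 5, so the
   caller may omit the lower-bound comparison.  */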
/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static bool
node_has_high_bound (case_node_ptr node, tree index_type)
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If there is no upper bound, obviously no test is needed.  */

  if (TYPE_MAX_VALUE (index_type) == NULL)
    return true;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return true;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return false;

  high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
                               node->high,
                               build_int_cst (TREE_TYPE (node->high), 1));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return false;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return true;

  return false;
}
/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static bool
node_is_bounded (case_node_ptr node, tree index_type)
{
  return (node_has_low_bound (node, index_type)
          && node_has_high_bound (node, index_type));
}
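/* Illustrative case: a node for the value 51 whose parents already tested
   50 (some pnode->high == 50) and 52 (some pnode->low == 52) is bounded on
   both sides, so emit_case_nodes below can branch to its label without any
   further comparison.  */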
/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out of bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */

static void
emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
                 int default_prob, tree index_type)
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TYPE_UNSIGNED (index_type);
  int probability;
  int prob = node->prob, subtree_prob = node->subtree_prob;
  enum machine_mode mode = GET_MODE (index);
  enum machine_mode imode = TYPE_MODE (index_type);

  /* Handle indices detected as constant during RTL expansion.  */
  if (mode == VOIDmode)
    mode = imode;

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      probability = conditional_probability (prob, subtree_prob + default_prob);
      /* Node is single valued.  First see if the index expression matches
         this node and then check our children, if any.  */
      do_jump_if_equal (mode, index,
                        convert_modes (mode, imode,
                                       expand_normal (node->low),
                                       unsignedp),
                        label_rtx (node->code_label), unsignedp, probability);
      /* Since this case is taken at this point, reduce its weight from
         the subtree weight.  */
      subtree_prob -= prob;
      if (node->right != 0 && node->left != 0)
        {
          /* This node has children on both sides.
             Dispatch to one side or the other
             by comparing the index value with this node's value.
             If one subtree is bounded, check that one first,
             so we can avoid real branches in the tree.  */

          if (node_is_bounded (node->right, index_type))
            {
              probability = conditional_probability (
                  node->right->prob,
                  subtree_prob + default_prob);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (node->right->code_label),
                                       probability);
              emit_case_nodes (index, node->left, default_label, default_prob,
                               index_type);
            }

          else if (node_is_bounded (node->left, index_type))
            {
              probability = conditional_probability (
                  node->left->prob,
                  subtree_prob + default_prob);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       LT, NULL_RTX, mode, unsignedp,
                                       label_rtx (node->left->code_label),
                                       probability);
              emit_case_nodes (index, node->right, default_label, default_prob,
                               index_type);
            }

          /* If both children are single-valued cases with no
             children, finish up all the work.  This way, we can save
             one ordered comparison.  */
          else if (tree_int_cst_equal (node->right->low, node->right->high)
                   && node->right->left == 0
                   && node->right->right == 0
                   && tree_int_cst_equal (node->left->low, node->left->high)
                   && node->left->left == 0
                   && node->left->right == 0)
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              /* See if the value matches what the right hand side
                 wants.  */
              probability = conditional_probability (
                  node->right->prob,
                  subtree_prob + default_prob);
              do_jump_if_equal (mode, index,
                                convert_modes (mode, imode,
                                               expand_normal (node->right->low),
                                               unsignedp),
                                label_rtx (node->right->code_label),
                                unsignedp, probability);

              /* See if the value matches what the left hand side
                 wants.  */
              probability = conditional_probability (
                  node->left->prob,
                  subtree_prob + default_prob);
              do_jump_if_equal (mode, index,
                                convert_modes (mode, imode,
                                               expand_normal (node->left->low),
                                               unsignedp),
                                label_rtx (node->left->code_label),
                                unsignedp, probability);
            }

          else
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              tree test_label
                = build_decl (curr_insn_location (),
                              LABEL_DECL, NULL_TREE, NULL_TREE);

              /* The default label could be reached either through the right
                 subtree or the left subtree.  Divide the probability
                 equally between the two.  */
              probability = conditional_probability (
                  node->right->subtree_prob + default_prob/2,
                  subtree_prob + default_prob);
              /* See if the value is on the right.  */
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (test_label),
                                       probability);
              default_prob /= 2;

              /* Value must be on the left.
                 Handle the left-hand subtree.  */
              emit_case_nodes (index, node->left, default_label, default_prob,
                               index_type);
              /* If left-hand subtree does nothing,
                 go to default.  */
              if (default_label)
                emit_jump (default_label);

              /* Code branches here for the right-hand subtree.  */
              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, default_prob,
                               index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Here we have a right child but no left so we issue a conditional
             branch to default and process the right child.

             Omit the conditional branch to default if the right child
             does not have any children and is single valued; it would
             cost too much space to save so little time.  */

          if (node->right->right || node->right->left
              || !tree_int_cst_equal (node->right->low, node->right->high))
            {
              if (!node_has_low_bound (node, index_type))
                {
                  probability = conditional_probability (
                      default_prob/2,
                      subtree_prob + default_prob);
                  emit_cmp_and_jump_insns (index,
                                           convert_modes
                                           (mode, imode,
                                            expand_normal (node->high),
                                            unsignedp),
                                           LT, NULL_RTX, mode, unsignedp,
                                           default_label,
                                           probability);
                  default_prob /= 2;
                }

              emit_case_nodes (index, node->right, default_label, default_prob,
                               index_type);
            }
          else
            {
              probability = conditional_probability (
                  node->right->subtree_prob,
                  subtree_prob + default_prob);
              /* We cannot process node->right normally
                 since we haven't ruled out the numbers less than
                 this node's value.  So handle node->right explicitly.  */
              do_jump_if_equal (mode, index,
                                convert_modes
                                (mode, imode,
                                 expand_normal (node->right->low),
                                 unsignedp),
                                label_rtx (node->right->code_label),
                                unsignedp, probability);
            }
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Just one subtree, on the left.  */
          if (node->left->left || node->left->right
              || !tree_int_cst_equal (node->left->low, node->left->high))
            {
              if (!node_has_high_bound (node, index_type))
                {
                  probability = conditional_probability (
                      default_prob/2,
                      subtree_prob + default_prob);
                  emit_cmp_and_jump_insns (index,
                                           convert_modes
                                           (mode, imode,
                                            expand_normal (node->high),
                                            unsignedp),
                                           GT, NULL_RTX, mode, unsignedp,
                                           default_label,
                                           probability);
                  default_prob /= 2;
                }

              emit_case_nodes (index, node->left, default_label,
                               default_prob, index_type);
            }
          else
            {
              probability = conditional_probability (
                  node->left->subtree_prob,
                  subtree_prob + default_prob);
              /* We cannot process node->left normally
                 since we haven't ruled out the numbers greater than
                 this node's value.  So handle node->left explicitly.  */
              do_jump_if_equal (mode, index,
                                convert_modes
                                (mode, imode,
                                 expand_normal (node->left->low),
                                 unsignedp),
                                label_rtx (node->left->code_label),
                                unsignedp, probability);
            }
        }
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
         value, except that we do not start by testing whether this node
         is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
        {
          /* Node has subtrees on both sides.
             If the right-hand subtree is bounded,
             test for it first, since we can go straight there.
             Otherwise, we need to make a branch in the control structure,
             then handle the two subtrees.  */
          tree test_label = 0;

          if (node_is_bounded (node->right, index_type))
            {
              /* Right hand node is fully bounded so we can eliminate any
                 testing and branch directly to the target code.  */
              probability = conditional_probability (
                  node->right->subtree_prob,
                  subtree_prob + default_prob);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (node->right->code_label),
                                       probability);
            }
          else
            {
              /* Right hand node requires testing.
                 Branch to a label where we will handle it later.  */

              test_label = build_decl (curr_insn_location (),
                                       LABEL_DECL, NULL_TREE, NULL_TREE);
              probability = conditional_probability (
                  node->right->subtree_prob + default_prob/2,
                  subtree_prob + default_prob);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (test_label),
                                       probability);
              default_prob /= 2;
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          probability = conditional_probability (
              prob,
              subtree_prob + default_prob);
          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_normal (node->low),
                                    unsignedp),
                                   GE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label),
                                   probability);

          /* Handle the left-hand subtree.  */
          emit_case_nodes (index, node->left, default_label, default_prob,
                           index_type);

          /* If right node had to be handled later, do that now.  */
          if (test_label)
            {
              /* If the left-hand subtree fell through,
                 don't let it fall into the right-hand subtree.  */
              if (default_label)
                emit_jump (default_label);

              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, default_prob,
                               index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Deal with values to the left of this node,
             if they are possible.  */
          if (!node_has_low_bound (node, index_type))
            {
              probability = conditional_probability (
                  default_prob/2,
                  subtree_prob + default_prob);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->low),
                                        unsignedp),
                                       LT, NULL_RTX, mode, unsignedp,
                                       default_label,
                                       probability);
              default_prob /= 2;
            }

          /* Value belongs to this node or to the right-hand subtree.  */

          probability = conditional_probability (
              prob,
              subtree_prob + default_prob);
          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_normal (node->high),
                                    unsignedp),
                                   LE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label),
                                   probability);

          emit_case_nodes (index, node->right, default_label, default_prob,
                           index_type);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Deal with values to the right of this node,
             if they are possible.  */
          if (!node_has_high_bound (node, index_type))
            {
              probability = conditional_probability (
                  default_prob/2,
                  subtree_prob + default_prob);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       default_label,
                                       probability);
              default_prob /= 2;
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          probability = conditional_probability (
              prob,
              subtree_prob + default_prob);
          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_normal (node->low),
                                    unsignedp),
                                   GE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label),
                                   probability);

          emit_case_nodes (index, node->left, default_label, default_prob,
                           index_type);
        }

      else
        {
          /* Node has no children so we check low and high bounds to remove
             redundant tests.  Only one of the bounds can exist,
             since otherwise this node is bounded--a case tested already.  */
          int high_bound = node_has_high_bound (node, index_type);
          int low_bound = node_has_low_bound (node, index_type);

          if (!high_bound && low_bound)
            {
              probability = conditional_probability (
                  default_prob,
                  subtree_prob + default_prob);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->high),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       default_label,
                                       probability);
            }

          else if (!low_bound && high_bound)
            {
              probability = conditional_probability (
                  default_prob,
                  subtree_prob + default_prob);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_normal (node->low),
                                        unsignedp),
                                       LT, NULL_RTX, mode, unsignedp,
                                       default_label,
                                       probability);
            }
          else if (!low_bound && !high_bound)
            {
              /* Widen LOW and HIGH to the same width as INDEX.  */
              tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
              tree low = build1 (CONVERT_EXPR, type, node->low);
              tree high = build1 (CONVERT_EXPR, type, node->high);
              rtx low_rtx, new_index, new_bound;

              /* Instead of doing two branches, emit one unsigned branch for
                 (index-low) > (high-low).  */
              low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
              new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
                                               NULL_RTX, unsignedp,
                                               OPTAB_WIDEN);
              new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
                                                    high, low),
                                       NULL_RTX, mode, EXPAND_NORMAL);

              probability = conditional_probability (
                  default_prob,
                  subtree_prob + default_prob);
              emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
                                       mode, 1, default_label, probability);
            }

          emit_jump (label_rtx (node->code_label));
        }
    }
}