gcc/stmt.c
1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file handles the generation of rtl code from tree structure
21 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
22 The functions whose names start with `expand_' are called by the
23 expander to generate RTL instructions for various kinds of constructs. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
30 #include "rtl.h"
31 #include "hard-reg-set.h"
32 #include "tree.h"
33 #include "tm_p.h"
34 #include "flags.h"
35 #include "except.h"
36 #include "function.h"
37 #include "insn-config.h"
38 #include "expr.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "machmode.h"
42 #include "diagnostic-core.h"
43 #include "output.h"
44 #include "ggc.h"
45 #include "langhooks.h"
46 #include "predict.h"
47 #include "optabs.h"
48 #include "target.h"
49 #include "gimple.h"
50 #include "regs.h"
51 #include "alloc-pool.h"
52 #include "pretty-print.h"
53 #include "pointer-set.h"
54 #include "params.h"
55 #include "dumpfile.h"
58 /* Functions and data structures for expanding case statements. */
60 /* Case label structure, used to hold info on labels within case
61 statements. We handle "range" labels; for a single-value label
62 as in C, the high and low limits are the same.
64 We start with a vector of case nodes sorted in ascending order, and
65 the default label as the last element in the vector. Before expanding
66 to RTL, we transform this vector into a list linked via the RIGHT
67 fields in the case_node struct. Nodes with higher case values are
68 later in the list.
70 Switch statements can be output in three forms. A branch table is
71 used if there are more than a few labels and the labels are dense
72 within the range between the smallest and largest case value. If a
73 branch table is used, no further manipulations are done with the case
74 node chain.
76 The alternative to the use of a branch table is to generate a series
77 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
78 and PARENT fields to hold a binary tree. Initially the tree is
79 totally unbalanced, with everything on the right. We balance the tree
80 with nodes on the left having lower case values than the parent
81 and nodes on the right having higher values. We then output the tree
82 in order.
84 For very small, suitable switch statements, we can generate a series
85 of simple bit test and branches instead. */
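/* Illustrative example (not from this file; f and g are assumed helpers):
   for a switch such as

       switch (x)
         {
         case 0: case 1: case 2: case 3:
         case 4: case 5: case 6: case 7:
           f (x); break;
         default:
           g (x); break;
         }

   the case values are dense in [0, 7], so a branch table is normally
   emitted.  For sparse values such as 1, 1000 and 100000 a balanced tree
   of compare-and-jump insns is used instead, and a tiny switch whose
   cases share only a few targets may be lowered to bit tests.  */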
87 struct case_node
89 struct case_node *left; /* Left son in binary tree */
90 struct case_node *right; /* Right son in binary tree; also node chain */
91 struct case_node *parent; /* Parent of node in binary tree */
92 tree low; /* Lowest index value for this label */
93 tree high; /* Highest index value for this label */
94 tree code_label; /* Label to jump to when node matches */
95 int prob; /* Probability of taking this case. */
96 /* Probability of reaching subtree rooted at this node */
97 int subtree_prob;
100 typedef struct case_node case_node;
101 typedef struct case_node *case_node_ptr;
103 extern basic_block label_to_block_fn (struct function *, tree);
105 static int n_occurrences (int, const char *);
106 static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
107 static void expand_nl_goto_receiver (void);
108 static bool check_operand_nalternatives (tree, tree);
109 static bool check_unique_operand_names (tree, tree, tree);
110 static char *resolve_operand_name_1 (char *, tree, tree, tree);
111 static void expand_null_return_1 (void);
112 static void expand_value_return (rtx);
113 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
114 static int node_has_low_bound (case_node_ptr, tree);
115 static int node_has_high_bound (case_node_ptr, tree);
116 static int node_is_bounded (case_node_ptr, tree);
117 static void emit_case_nodes (rtx, case_node_ptr, rtx, int, tree);
119 /* Return the rtx-label that corresponds to a LABEL_DECL,
120 creating it if necessary. */
123 label_rtx (tree label)
125 gcc_assert (TREE_CODE (label) == LABEL_DECL);
127 if (!DECL_RTL_SET_P (label))
129 rtx r = gen_label_rtx ();
130 SET_DECL_RTL (label, r);
131 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
132 LABEL_PRESERVE_P (r) = 1;
135 return DECL_RTL (label);
138 /* As above, but also put it on the forced-reference list of the
139 function that contains it. */
141 force_label_rtx (tree label)
143 rtx ref = label_rtx (label);
144 tree function = decl_function_context (label);
146 gcc_assert (function);
148 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref, forced_labels);
149 return ref;
152 /* Add an unconditional jump to LABEL as the next sequential instruction. */
154 void
155 emit_jump (rtx label)
157 do_pending_stack_adjust ();
158 emit_jump_insn (gen_jump (label));
159 emit_barrier ();
162 /* Emit code to jump to the address
163 specified by the pointer expression EXP. */
165 void
166 expand_computed_goto (tree exp)
168 rtx x = expand_normal (exp);
170 x = convert_memory_address (Pmode, x);
172 do_pending_stack_adjust ();
173 emit_indirect_jump (x);
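/* Illustrative example (assumed, not part of this file): the pointer
   expression handled here typically comes from GCC's computed-goto
   extension, e.g.

       void dispatch_example (int i)
       {
         static void *table[] = { &&op_a, &&op_b };
         goto *table[i & 1];
       op_a: return;
       op_b: return;
       }

   where '&&label' yields the address of a label and 'goto *expr' is
   expanded through expand_computed_goto above.  */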
176 /* Handle goto statements and the labels that they can go to. */
178 /* Specify the location in the RTL code of a label LABEL,
179 which is a LABEL_DECL tree node.
181 This is used for the kind of label that the user can jump to with a
182 goto statement, and for alternatives of a switch or case statement.
183 RTL labels generated for loops and conditionals don't go through here;
184 they are generated directly at the RTL level, by other functions below.
186 Note that this has nothing to do with defining label *names*.
187 Languages vary in how they do that and what that even means. */
189 void
190 expand_label (tree label)
192 rtx label_r = label_rtx (label);
194 do_pending_stack_adjust ();
195 emit_label (label_r);
196 if (DECL_NAME (label))
197 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
199 if (DECL_NONLOCAL (label))
201 expand_nl_goto_receiver ();
202 nonlocal_goto_handler_labels
203 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
204 nonlocal_goto_handler_labels);
207 if (FORCED_LABEL (label))
208 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
210 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
211 maybe_set_first_label_num (label_r);
214 /* Generate RTL code for a `goto' statement with target label LABEL.
215 LABEL should be a LABEL_DECL tree node that was or will later be
216 defined with `expand_label'. */
218 void
219 expand_goto (tree label)
221 #ifdef ENABLE_CHECKING
222 /* Check for a nonlocal goto to a containing function. Should have
223 gotten translated to __builtin_nonlocal_goto. */
224 tree context = decl_function_context (label);
225 gcc_assert (!context || context == current_function_decl);
226 #endif
228 emit_jump (label_rtx (label));
231 /* Return the number of times character C occurs in string S. */
232 static int
233 n_occurrences (int c, const char *s)
235 int n = 0;
236 while (*s)
237 n += (*s++ == c);
238 return n;
241 /* Generate RTL for an asm statement (explicit assembler code).
242 STRING is a STRING_CST node containing the assembler code text,
243 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
244 insn is volatile; don't optimize it. */
246 static void
247 expand_asm_loc (tree string, int vol, location_t locus)
249 rtx body;
251 if (TREE_CODE (string) == ADDR_EXPR)
252 string = TREE_OPERAND (string, 0);
254 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
255 ggc_strdup (TREE_STRING_POINTER (string)),
256 locus);
258 MEM_VOLATILE_P (body) = vol;
260 emit_insn (body);
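/* Illustrative example (assumed): a basic asm statement with no operands,
   such as

       __asm__ volatile ("nop");

   arrives here as a STRING_CST (possibly wrapped in an ADDR_EXPR) and is
   emitted as a single ASM_INPUT body by expand_asm_loc above.  */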
263 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
264 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
265 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
266 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
267 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
268 constraint allows the use of a register operand. And, *IS_INOUT
269 will be true if the operand is read-write, i.e., if it is used as
270 an input as well as an output. If *CONSTRAINT_P is not in
271 canonical form, it will be made canonical. (Note that `+' will be
272 replaced with `=' as part of this process.)
274 Returns TRUE if all went well; FALSE if an error occurred. */
276 bool
277 parse_output_constraint (const char **constraint_p, int operand_num,
278 int ninputs, int noutputs, bool *allows_mem,
279 bool *allows_reg, bool *is_inout)
281 const char *constraint = *constraint_p;
282 const char *p;
284 /* Assume the constraint doesn't allow the use of either a register
285 or memory. */
286 *allows_mem = false;
287 *allows_reg = false;
289 /* Allow the `=' or `+' to not be at the beginning of the string,
290 since it wasn't explicitly documented that way, and there is a
291 large body of code that puts it last. Swap the character to
292 the front, so as not to uglify any place else. */
293 p = strchr (constraint, '=');
294 if (!p)
295 p = strchr (constraint, '+');
297 /* If the string doesn't contain an `=', issue an error
298 message. */
299 if (!p)
301 error ("output operand constraint lacks %<=%>");
302 return false;
305 /* If the constraint begins with `+', then the operand is both read
306 from and written to. */
307 *is_inout = (*p == '+');
309 /* Canonicalize the output constraint so that it begins with `='. */
310 if (p != constraint || *is_inout)
312 char *buf;
313 size_t c_len = strlen (constraint);
315 if (p != constraint)
316 warning (0, "output constraint %qc for operand %d "
317 "is not at the beginning",
318 *p, operand_num);
320 /* Make a copy of the constraint. */
321 buf = XALLOCAVEC (char, c_len + 1);
322 strcpy (buf, constraint);
323 /* Swap the first character and the `=' or `+'. */
324 buf[p - constraint] = buf[0];
325 /* Make sure the first character is an `='. (Until we do this,
326 it might be a `+'.) */
327 buf[0] = '=';
328 /* Replace the constraint with the canonicalized string. */
329 *constraint_p = ggc_alloc_string (buf, c_len);
330 constraint = *constraint_p;
333 /* Loop through the constraint string. */
334 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
335 switch (*p)
337 case '+':
338 case '=':
339 error ("operand constraint contains incorrectly positioned "
340 "%<+%> or %<=%>");
341 return false;
343 case '%':
344 if (operand_num + 1 == ninputs + noutputs)
346 error ("%<%%%> constraint used with last operand");
347 return false;
349 break;
351 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
352 *allows_mem = true;
353 break;
355 case '?': case '!': case '*': case '&': case '#':
356 case 'E': case 'F': case 'G': case 'H':
357 case 's': case 'i': case 'n':
358 case 'I': case 'J': case 'K': case 'L': case 'M':
359 case 'N': case 'O': case 'P': case ',':
360 break;
362 case '0': case '1': case '2': case '3': case '4':
363 case '5': case '6': case '7': case '8': case '9':
364 case '[':
365 error ("matching constraint not valid in output operand");
366 return false;
368 case '<': case '>':
369 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
370 excepting those that expand_call created. So match memory
371 and hope. */
372 *allows_mem = true;
373 break;
375 case 'g': case 'X':
376 *allows_reg = true;
377 *allows_mem = true;
378 break;
380 case 'p': case 'r':
381 *allows_reg = true;
382 break;
384 default:
385 if (!ISALPHA (*p))
386 break;
387 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
388 *allows_reg = true;
389 #ifdef EXTRA_CONSTRAINT_STR
390 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
391 *allows_reg = true;
392 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
393 *allows_mem = true;
394 else
396 /* Otherwise we can't assume anything about the nature of
397 the constraint except that it isn't purely registers.
398 Treat it like "g" and hope for the best. */
399 *allows_reg = true;
400 *allows_mem = true;
402 #endif
403 break;
406 return true;
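/* Illustrative example (assumed x86 example, not from this file): for

       int inc_example (int x)
       {
         __asm__ ("addl $1, %0" : "+r" (x));
         return x;
       }

   parse_output_constraint sees "+r" for operand 0, sets *allows_reg,
   leaves *allows_mem false, sets *is_inout, and canonicalizes the string
   to "=r".  A user-written "r=" would be accepted with a warning and
   rewritten the same way.  */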
409 /* Similar, but for input constraints. */
411 bool
412 parse_input_constraint (const char **constraint_p, int input_num,
413 int ninputs, int noutputs, int ninout,
414 const char * const * constraints,
415 bool *allows_mem, bool *allows_reg)
417 const char *constraint = *constraint_p;
418 const char *orig_constraint = constraint;
419 size_t c_len = strlen (constraint);
420 size_t j;
421 bool saw_match = false;
423 /* Assume the constraint doesn't allow the use of either
424 a register or memory. */
425 *allows_mem = false;
426 *allows_reg = false;
428 /* Make sure constraint has neither `=', `+', nor '&'. */
430 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
431 switch (constraint[j])
433 case '+': case '=': case '&':
434 if (constraint == orig_constraint)
436 error ("input operand constraint contains %qc", constraint[j]);
437 return false;
439 break;
441 case '%':
442 if (constraint == orig_constraint
443 && input_num + 1 == ninputs - ninout)
445 error ("%<%%%> constraint used with last operand");
446 return false;
448 break;
450 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
451 *allows_mem = true;
452 break;
454 case '<': case '>':
455 case '?': case '!': case '*': case '#':
456 case 'E': case 'F': case 'G': case 'H':
457 case 's': case 'i': case 'n':
458 case 'I': case 'J': case 'K': case 'L': case 'M':
459 case 'N': case 'O': case 'P': case ',':
460 break;
462 /* Whether or not a numeric constraint allows a register is
463 decided by the matching constraint, and so there is no need
464 to do anything special with them. We must handle them in
465 the default case, so that we don't unnecessarily force
466 operands to memory. */
467 case '0': case '1': case '2': case '3': case '4':
468 case '5': case '6': case '7': case '8': case '9':
470 char *end;
471 unsigned long match;
473 saw_match = true;
475 match = strtoul (constraint + j, &end, 10);
476 if (match >= (unsigned long) noutputs)
478 error ("matching constraint references invalid operand number");
479 return false;
482 /* Try and find the real constraint for this dup. Only do this
483 if the matching constraint is the only alternative. */
484 if (*end == '\0'
485 && (j == 0 || (j == 1 && constraint[0] == '%')))
487 constraint = constraints[match];
488 *constraint_p = constraint;
489 c_len = strlen (constraint);
490 j = 0;
491 /* ??? At the end of the loop, we will skip the first part of
492 the matched constraint. This assumes not only that the
493 other constraint is an output constraint, but also that
494 the '=' or '+' come first. */
495 break;
497 else
498 j = end - constraint;
499 /* Anticipate increment at end of loop. */
500 j--;
502 /* Fall through. */
504 case 'p': case 'r':
505 *allows_reg = true;
506 break;
508 case 'g': case 'X':
509 *allows_reg = true;
510 *allows_mem = true;
511 break;
513 default:
514 if (! ISALPHA (constraint[j]))
516 error ("invalid punctuation %qc in constraint", constraint[j]);
517 return false;
519 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
520 != NO_REGS)
521 *allows_reg = true;
522 #ifdef EXTRA_CONSTRAINT_STR
523 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
524 *allows_reg = true;
525 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
526 *allows_mem = true;
527 else
529 /* Otherwise we can't assume anything about the nature of
530 the constraint except that it isn't purely registers.
531 Treat it like "g" and hope for the best. */
532 *allows_reg = true;
533 *allows_mem = true;
535 #endif
536 break;
539 if (saw_match && !*allows_reg)
540 warning (0, "matching constraint does not allow a register");
542 return true;
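/* Illustrative example (assumed x86 example, not from this file): a
   numeric input constraint refers back to an output operand.  In

       int mul_example (int a, int b)
       {
         int r;
         __asm__ ("imull %2, %0" : "=r" (r) : "0" (a), "r" (b));
         return r;
       }

   the first input uses "0", so parse_input_constraint follows the match
   to output operand 0 and inherits its "=r" register-ness; the operand is
   then allocated to the same location as that output.  */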
545 /* Return DECL iff there's an overlap between *REGS and DECL, where DECL
546 can be an asm-declared register. Called via walk_tree. */
548 static tree
549 decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED,
550 void *data)
552 tree decl = *declp;
553 const HARD_REG_SET *const regs = (const HARD_REG_SET *) data;
555 if (TREE_CODE (decl) == VAR_DECL)
557 if (DECL_HARD_REGISTER (decl)
558 && REG_P (DECL_RTL (decl))
559 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
561 rtx reg = DECL_RTL (decl);
563 if (overlaps_hard_reg_set_p (*regs, GET_MODE (reg), REGNO (reg)))
564 return decl;
566 walk_subtrees = 0;
568 else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
569 walk_subtrees = 0;
570 return NULL_TREE;
573 /* If there is an overlap between *REGS and DECL, return the first overlap
574 found. */
575 tree
576 tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
578 return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
581 /* Check for overlap between registers marked in CLOBBERED_REGS and
582 anything inappropriate in T.  Emit an error and return true if an
583 overlap is found; return false if everything is OK. */
585 static bool
586 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
588 /* Conflicts between asm-declared register variables and the clobber
589 list are not allowed. */
590 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
592 if (overlap)
594 error ("asm-specifier for variable %qE conflicts with asm clobber list",
595 DECL_NAME (overlap));
597 /* Reset registerness to stop multiple errors being emitted for a single
598 variable. */
599 DECL_REGISTER (overlap) = 0;
600 return true;
603 return false;
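/* Illustrative example (assumed x86 example): the check above rejects
   code like

       void clobber_conflict_example (void)
       {
         register int counter __asm__ ("ebx") = 0;
         __asm__ volatile ("xorl %%ebx, %%ebx" : "+r" (counter) : : "ebx");
       }

   because the asm-declared register variable used as an operand overlaps
   the "ebx" entry in the clobber list, producing the "asm-specifier for
   variable ... conflicts with asm clobber list" error.  */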
606 /* Generate RTL for an asm statement with arguments.
607 STRING is the instruction template.
608 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
609 Each output or input has an expression in the TREE_VALUE and
610 a tree list in TREE_PURPOSE which in turn contains a constraint
611 name in TREE_VALUE (or NULL_TREE) and a constraint string
612 in TREE_PURPOSE.
613 CLOBBERS is a list of STRING_CST nodes each naming a hard register
614 that is clobbered by this insn.
616 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
617 should be the fallthru basic block of the asm goto.
619 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
620 Some elements of OUTPUTS may be replaced with trees representing temporary
621 values. The caller should copy those temporary values to the originally
622 specified lvalues.
624 VOL nonzero means the insn is volatile; don't optimize it. */
626 static void
627 expand_asm_operands (tree string, tree outputs, tree inputs,
628 tree clobbers, tree labels, basic_block fallthru_bb,
629 int vol, location_t locus)
631 rtvec argvec, constraintvec, labelvec;
632 rtx body;
633 int ninputs = list_length (inputs);
634 int noutputs = list_length (outputs);
635 int nlabels = list_length (labels);
636 int ninout;
637 int nclobbers;
638 HARD_REG_SET clobbered_regs;
639 int clobber_conflict_found = 0;
640 tree tail;
641 tree t;
642 int i;
643 /* Vector of RTX's of evaluated output operands. */
644 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
645 int *inout_opnum = XALLOCAVEC (int, noutputs);
646 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
647 enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
648 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
649 int old_generating_concat_p = generating_concat_p;
650 rtx fallthru_label = NULL_RTX;
652 /* An ASM with no outputs needs to be treated as volatile, for now. */
653 if (noutputs == 0)
654 vol = 1;
656 if (! check_operand_nalternatives (outputs, inputs))
657 return;
659 string = resolve_asm_operand_names (string, outputs, inputs, labels);
661 /* Collect constraints. */
662 i = 0;
663 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
664 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
665 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
666 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
668 /* Sometimes we wish to automatically clobber registers across an asm.
669 Case in point is when the i386 backend moved from cc0 to a hard reg --
670 maintaining source-level compatibility means automatically clobbering
671 the flags register. */
672 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
674 /* Count the number of meaningful clobbered registers, ignoring what
675 we would ignore later. */
676 nclobbers = 0;
677 CLEAR_HARD_REG_SET (clobbered_regs);
678 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
680 const char *regname;
681 int nregs;
683 if (TREE_VALUE (tail) == error_mark_node)
684 return;
685 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
687 i = decode_reg_name_and_count (regname, &nregs);
688 if (i == -4)
689 ++nclobbers;
690 else if (i == -2)
691 error ("unknown register name %qs in %<asm%>", regname);
693 /* Mark clobbered registers. */
694 if (i >= 0)
696 int reg;
698 for (reg = i; reg < i + nregs; reg++)
700 ++nclobbers;
702 /* Clobbering the PIC register is an error. */
703 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
705 error ("PIC register clobbered by %qs in %<asm%>", regname);
706 return;
709 SET_HARD_REG_BIT (clobbered_regs, reg);
714 /* First pass over inputs and outputs checks validity and sets
715 mark_addressable if needed. */
717 ninout = 0;
718 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
720 tree val = TREE_VALUE (tail);
721 tree type = TREE_TYPE (val);
722 const char *constraint;
723 bool is_inout;
724 bool allows_reg;
725 bool allows_mem;
727 /* If there's an erroneous arg, emit no insn. */
728 if (type == error_mark_node)
729 return;
731 /* Try to parse the output constraint. If that fails, there's
732 no point in going further. */
733 constraint = constraints[i];
734 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
735 &allows_mem, &allows_reg, &is_inout))
736 return;
738 if (! allows_reg
739 && (allows_mem
740 || is_inout
741 || (DECL_P (val)
742 && REG_P (DECL_RTL (val))
743 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
744 mark_addressable (val);
746 if (is_inout)
747 ninout++;
750 ninputs += ninout;
751 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
753 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
754 return;
757 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
759 bool allows_reg, allows_mem;
760 const char *constraint;
762 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
763 would get VOIDmode and that could cause a crash in reload. */
764 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
765 return;
767 constraint = constraints[i + noutputs];
768 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
769 constraints, &allows_mem, &allows_reg))
770 return;
772 if (! allows_reg && allows_mem)
773 mark_addressable (TREE_VALUE (tail));
776 /* Second pass evaluates arguments. */
778 /* Make sure stack is consistent for asm goto. */
779 if (nlabels > 0)
780 do_pending_stack_adjust ();
782 ninout = 0;
783 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
785 tree val = TREE_VALUE (tail);
786 tree type = TREE_TYPE (val);
787 bool is_inout;
788 bool allows_reg;
789 bool allows_mem;
790 rtx op;
791 bool ok;
793 ok = parse_output_constraint (&constraints[i], i, ninputs,
794 noutputs, &allows_mem, &allows_reg,
795 &is_inout);
796 gcc_assert (ok);
798 /* If an output operand is not a decl or indirect ref and our constraint
799 allows a register, make a temporary to act as an intermediate.
800 Make the asm insn write into that, then our caller will copy it to
801 the real output operand. Likewise for promoted variables. */
803 generating_concat_p = 0;
805 real_output_rtx[i] = NULL_RTX;
806 if ((TREE_CODE (val) == INDIRECT_REF
807 && allows_mem)
808 || (DECL_P (val)
809 && (allows_mem || REG_P (DECL_RTL (val)))
810 && ! (REG_P (DECL_RTL (val))
811 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
812 || ! allows_reg
813 || is_inout)
815 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
816 if (MEM_P (op))
817 op = validize_mem (op);
819 if (! allows_reg && !MEM_P (op))
820 error ("output number %d not directly addressable", i);
821 if ((! allows_mem && MEM_P (op))
822 || GET_CODE (op) == CONCAT)
824 real_output_rtx[i] = op;
825 op = gen_reg_rtx (GET_MODE (op));
826 if (is_inout)
827 emit_move_insn (op, real_output_rtx[i]);
830 else
832 op = assign_temp (type, 0, 1);
833 op = validize_mem (op);
834 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
835 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
836 TREE_VALUE (tail) = make_tree (type, op);
838 output_rtx[i] = op;
840 generating_concat_p = old_generating_concat_p;
842 if (is_inout)
844 inout_mode[ninout] = TYPE_MODE (type);
845 inout_opnum[ninout++] = i;
848 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
849 clobber_conflict_found = 1;
852 /* Make vectors for the expression-rtx, constraint strings,
853 and named operands. */
855 argvec = rtvec_alloc (ninputs);
856 constraintvec = rtvec_alloc (ninputs);
857 labelvec = rtvec_alloc (nlabels);
859 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
860 : GET_MODE (output_rtx[0])),
861 ggc_strdup (TREE_STRING_POINTER (string)),
862 empty_string, 0, argvec, constraintvec,
863 labelvec, locus);
865 MEM_VOLATILE_P (body) = vol;
867 /* Eval the inputs and put them into ARGVEC.
868 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
870 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
872 bool allows_reg, allows_mem;
873 const char *constraint;
874 tree val, type;
875 rtx op;
876 bool ok;
878 constraint = constraints[i + noutputs];
879 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
880 constraints, &allows_mem, &allows_reg);
881 gcc_assert (ok);
883 generating_concat_p = 0;
885 val = TREE_VALUE (tail);
886 type = TREE_TYPE (val);
887 /* EXPAND_INITIALIZER will not generate code for valid initializer
888 constants, but will still generate code for other types of operand.
889 This is the behavior we want for constant constraints. */
890 op = expand_expr (val, NULL_RTX, VOIDmode,
891 allows_reg ? EXPAND_NORMAL
892 : allows_mem ? EXPAND_MEMORY
893 : EXPAND_INITIALIZER);
895 /* Never pass a CONCAT to an ASM. */
896 if (GET_CODE (op) == CONCAT)
897 op = force_reg (GET_MODE (op), op);
898 else if (MEM_P (op))
899 op = validize_mem (op);
901 if (asm_operand_ok (op, constraint, NULL) <= 0)
903 if (allows_reg && TYPE_MODE (type) != BLKmode)
904 op = force_reg (TYPE_MODE (type), op);
905 else if (!allows_mem)
906 warning (0, "asm operand %d probably doesn%'t match constraints",
907 i + noutputs);
908 else if (MEM_P (op))
910 /* We won't recognize either volatile memory or memory
911 with a queued address as an available memory_operand
912 at this point. Ignore it: clearly this *is* a memory. */
914 else
915 gcc_unreachable ();
918 generating_concat_p = old_generating_concat_p;
919 ASM_OPERANDS_INPUT (body, i) = op;
921 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
922 = gen_rtx_ASM_INPUT (TYPE_MODE (type),
923 ggc_strdup (constraints[i + noutputs]));
925 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
926 clobber_conflict_found = 1;
929 /* Protect all the operands from the queue now that they have all been
930 evaluated. */
932 generating_concat_p = 0;
934 /* For in-out operands, copy output rtx to input rtx. */
935 for (i = 0; i < ninout; i++)
937 int j = inout_opnum[i];
938 char buffer[16];
940 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
941 = output_rtx[j];
943 sprintf (buffer, "%d", j);
944 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
945 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
948 /* Copy labels to the vector. */
949 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
951 rtx r;
952 /* If asm goto has any labels in the fallthru basic block, use
953 a label that we emit immediately after the asm goto. Expansion
954 may insert further instructions into the same basic block after
955 asm goto and if we don't do this, insertion of instructions on
956 the fallthru edge might misbehave. See PR58670. */
957 if (fallthru_bb
958 && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
960 if (fallthru_label == NULL_RTX)
961 fallthru_label = gen_label_rtx ();
962 r = fallthru_label;
964 else
965 r = label_rtx (TREE_VALUE (tail));
966 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
969 generating_concat_p = old_generating_concat_p;
971 /* Now, for each output, construct an rtx
972 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
973 ARGVEC CONSTRAINTS OPNAMES))
974 If there is more than one, put them inside a PARALLEL. */
976 if (nlabels > 0 && nclobbers == 0)
978 gcc_assert (noutputs == 0);
979 emit_jump_insn (body);
981 else if (noutputs == 0 && nclobbers == 0)
983 /* No output operands: put in a raw ASM_OPERANDS rtx. */
984 emit_insn (body);
986 else if (noutputs == 1 && nclobbers == 0)
988 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
989 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
991 else
993 rtx obody = body;
994 int num = noutputs;
996 if (num == 0)
997 num = 1;
999 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1001 /* For each output operand, store a SET. */
1002 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1004 XVECEXP (body, 0, i)
1005 = gen_rtx_SET (VOIDmode,
1006 output_rtx[i],
1007 gen_rtx_ASM_OPERANDS
1008 (GET_MODE (output_rtx[i]),
1009 ggc_strdup (TREE_STRING_POINTER (string)),
1010 ggc_strdup (constraints[i]),
1011 i, argvec, constraintvec, labelvec, locus));
1013 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1016 /* If there are no outputs (but there are some clobbers)
1017 store the bare ASM_OPERANDS into the PARALLEL. */
1019 if (i == 0)
1020 XVECEXP (body, 0, i++) = obody;
1022 /* Store (clobber REG) for each clobbered register specified. */
1024 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1026 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1027 int reg, nregs;
1028 int j = decode_reg_name_and_count (regname, &nregs);
1029 rtx clobbered_reg;
1031 if (j < 0)
1033 if (j == -3) /* `cc', which is not a register */
1034 continue;
1036 if (j == -4) /* `memory', don't cache memory across asm */
1038 XVECEXP (body, 0, i++)
1039 = gen_rtx_CLOBBER (VOIDmode,
1040 gen_rtx_MEM
1041 (BLKmode,
1042 gen_rtx_SCRATCH (VOIDmode)));
1043 continue;
1046 /* Ignore unknown register, error already signaled. */
1047 continue;
1050 for (reg = j; reg < j + nregs; reg++)
1052 /* Use QImode since that's guaranteed to clobber just
1053 one reg. */
1054 clobbered_reg = gen_rtx_REG (QImode, reg);
1056 /* Do sanity check for overlap between clobbers and
1057 respectively input and outputs that hasn't been
1058 handled. Such overlap should have been detected and
1059 reported above. */
1060 if (!clobber_conflict_found)
1062 int opno;
1064 /* We test the old body (obody) contents to avoid
1065 tripping over the under-construction body. */
1066 for (opno = 0; opno < noutputs; opno++)
1067 if (reg_overlap_mentioned_p (clobbered_reg,
1068 output_rtx[opno]))
1069 internal_error
1070 ("asm clobber conflict with output operand");
1072 for (opno = 0; opno < ninputs - ninout; opno++)
1073 if (reg_overlap_mentioned_p (clobbered_reg,
1074 ASM_OPERANDS_INPUT (obody,
1075 opno)))
1076 internal_error
1077 ("asm clobber conflict with input operand");
1080 XVECEXP (body, 0, i++)
1081 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1085 if (nlabels > 0)
1086 emit_jump_insn (body);
1087 else
1088 emit_insn (body);
1091 if (fallthru_label)
1092 emit_label (fallthru_label);
1094 /* For any outputs that needed reloading into registers, spill them
1095 back to where they belong. */
1096 for (i = 0; i < noutputs; ++i)
1097 if (real_output_rtx[i])
1098 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1100 crtl->has_asm_statement = 1;
1101 free_temp_slots ();
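/* Illustrative example (assumed x86 example, not from this file): an asm
   goto passes a non-empty LABELS list and no outputs, so the body built
   above is emitted with emit_jump_insn; labels living in the fallthru
   block are redirected to FALLTHRU_LABEL as described for PR58670.

       int asm_goto_example (int x)
       {
         __asm__ goto ("testl %0, %0; jz %l[is_zero]"
                       : : "r" (x) : "cc" : is_zero);
         return 1;
       is_zero:
         return 0;
       }
*/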
1104 void
1105 expand_asm_stmt (gimple stmt)
1107 int noutputs;
1108 tree outputs, tail, t;
1109 tree *o;
1110 size_t i, n;
1111 const char *s;
1112 tree str, out, in, cl, labels;
1113 location_t locus = gimple_location (stmt);
1114 basic_block fallthru_bb = NULL;
1116 /* Meh... convert the gimple asm operands into real tree lists.
1117 Eventually we should make all routines work on the vectors instead
1118 of relying on TREE_CHAIN. */
1119 out = NULL_TREE;
1120 n = gimple_asm_noutputs (stmt);
1121 if (n > 0)
1123 t = out = gimple_asm_output_op (stmt, 0);
1124 for (i = 1; i < n; i++)
1125 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
1128 in = NULL_TREE;
1129 n = gimple_asm_ninputs (stmt);
1130 if (n > 0)
1132 t = in = gimple_asm_input_op (stmt, 0);
1133 for (i = 1; i < n; i++)
1134 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
1137 cl = NULL_TREE;
1138 n = gimple_asm_nclobbers (stmt);
1139 if (n > 0)
1141 t = cl = gimple_asm_clobber_op (stmt, 0);
1142 for (i = 1; i < n; i++)
1143 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
1146 labels = NULL_TREE;
1147 n = gimple_asm_nlabels (stmt);
1148 if (n > 0)
1150 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
1151 if (fallthru)
1152 fallthru_bb = fallthru->dest;
1153 t = labels = gimple_asm_label_op (stmt, 0);
1154 for (i = 1; i < n; i++)
1155 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
1158 s = gimple_asm_string (stmt);
1159 str = build_string (strlen (s), s);
1161 if (gimple_asm_input_p (stmt))
1163 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
1164 return;
1167 outputs = out;
1168 noutputs = gimple_asm_noutputs (stmt);
1169 /* o[I] is the place that output number I should be written. */
1170 o = (tree *) alloca (noutputs * sizeof (tree));
1172 /* Record the contents of OUTPUTS before it is modified. */
1173 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1174 o[i] = TREE_VALUE (tail);
1176 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1177 OUTPUTS some trees for where the values were actually stored. */
1178 expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
1179 gimple_asm_volatile_p (stmt), locus);
1181 /* Copy all the intermediate outputs into the specified outputs. */
1182 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1184 if (o[i] != TREE_VALUE (tail))
1186 expand_assignment (o[i], TREE_VALUE (tail), false);
1187 free_temp_slots ();
1189 /* Restore the original value so that it's correct the next
1190 time we expand this function. */
1191 TREE_VALUE (tail) = o[i];
1196 /* A subroutine of expand_asm_operands. Check that all operands have
1197 the same number of alternatives. Return true if so. */
1199 static bool
1200 check_operand_nalternatives (tree outputs, tree inputs)
1202 if (outputs || inputs)
1204 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1205 int nalternatives
1206 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1207 tree next = inputs;
1209 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1211 error ("too many alternatives in %<asm%>");
1212 return false;
1215 tmp = outputs;
1216 while (tmp)
1218 const char *constraint
1219 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1221 if (n_occurrences (',', constraint) != nalternatives)
1223 error ("operand constraints for %<asm%> differ "
1224 "in number of alternatives");
1225 return false;
1228 if (TREE_CHAIN (tmp))
1229 tmp = TREE_CHAIN (tmp);
1230 else
1231 tmp = next, next = 0;
1235 return true;
1238 /* A subroutine of expand_asm_operands. Check that all operand names
1239 are unique. Return true if so. We rely on the fact that these names
1240 are identifiers, and so have been canonicalized by get_identifier,
1241 so all we need are pointer comparisons. */
1243 static bool
1244 check_unique_operand_names (tree outputs, tree inputs, tree labels)
1246 tree i, j, i_name = NULL_TREE;
1248 for (i = outputs; i ; i = TREE_CHAIN (i))
1250 i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1251 if (! i_name)
1252 continue;
1254 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1255 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1256 goto failure;
1259 for (i = inputs; i ; i = TREE_CHAIN (i))
1261 i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1262 if (! i_name)
1263 continue;
1265 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1266 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1267 goto failure;
1268 for (j = outputs; j ; j = TREE_CHAIN (j))
1269 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1270 goto failure;
1273 for (i = labels; i ; i = TREE_CHAIN (i))
1275 i_name = TREE_PURPOSE (i);
1276 if (! i_name)
1277 continue;
1279 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1280 if (simple_cst_equal (i_name, TREE_PURPOSE (j)))
1281 goto failure;
1282 for (j = inputs; j ; j = TREE_CHAIN (j))
1283 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1284 goto failure;
1287 return true;
1289 failure:
1290 error ("duplicate asm operand name %qs", TREE_STRING_POINTER (i_name));
1291 return false;
1294 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1295 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1296 STRING and in the constraints to those numbers. */
1298 tree
1299 resolve_asm_operand_names (tree string, tree outputs, tree inputs, tree labels)
1301 char *buffer;
1302 char *p;
1303 const char *c;
1304 tree t;
1306 check_unique_operand_names (outputs, inputs, labels);
1308 /* Substitute [<name>] in input constraint strings. There should be no
1309 named operands in output constraints. */
1310 for (t = inputs; t ; t = TREE_CHAIN (t))
1312 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1313 if (strchr (c, '[') != NULL)
1315 p = buffer = xstrdup (c);
1316 while ((p = strchr (p, '[')) != NULL)
1317 p = resolve_operand_name_1 (p, outputs, inputs, NULL);
1318 TREE_VALUE (TREE_PURPOSE (t))
1319 = build_string (strlen (buffer), buffer);
1320 free (buffer);
1324 /* Now check for any needed substitutions in the template. */
1325 c = TREE_STRING_POINTER (string);
1326 while ((c = strchr (c, '%')) != NULL)
1328 if (c[1] == '[')
1329 break;
1330 else if (ISALPHA (c[1]) && c[2] == '[')
1331 break;
1332 else
1334 c += 1 + (c[1] == '%');
1335 continue;
1339 if (c)
1341 /* OK, we need to make a copy so we can perform the substitutions.
1342 Assume that we will not need extra space--we get to remove '['
1343 and ']', which means we cannot have a problem until we have more
1344 than 999 operands. */
1345 buffer = xstrdup (TREE_STRING_POINTER (string));
1346 p = buffer + (c - TREE_STRING_POINTER (string));
1348 while ((p = strchr (p, '%')) != NULL)
1350 if (p[1] == '[')
1351 p += 1;
1352 else if (ISALPHA (p[1]) && p[2] == '[')
1353 p += 2;
1354 else
1356 p += 1 + (p[1] == '%');
1357 continue;
1360 p = resolve_operand_name_1 (p, outputs, inputs, labels);
1363 string = build_string (strlen (buffer), buffer);
1364 free (buffer);
1367 return string;
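/* Illustrative example (assumed x86 example): with symbolic operand names
   the template and constraints refer to operands as "[name]";
   resolve_asm_operand_names and resolve_operand_name_1 rewrite them to
   operand numbers before expansion, so

       int named_operand_example (int v)
       {
         int r;
         __asm__ ("leal 1(%[val]), %[res]" : [res] "=r" (r) : [val] "r" (v));
         return r;
       }

   is processed as if it had been written with "%0" and "%1".  */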
1370 /* A subroutine of resolve_operand_names. P points to the '[' for a
1371 potential named operand of the form [<name>]. In place, replace
1372 the name and brackets with a number. Return a pointer to the
1373 balance of the string after substitution. */
1375 static char *
1376 resolve_operand_name_1 (char *p, tree outputs, tree inputs, tree labels)
1378 char *q;
1379 int op;
1380 tree t;
1382 /* Collect the operand name. */
1383 q = strchr (++p, ']');
1384 if (!q)
1386 error ("missing close brace for named operand");
1387 return strchr (p, '\0');
1389 *q = '\0';
1391 /* Resolve the name to a number. */
1392 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1394 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1395 if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1396 goto found;
1398 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1400 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1401 if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1402 goto found;
1404 for (t = labels; t ; t = TREE_CHAIN (t), op++)
1406 tree name = TREE_PURPOSE (t);
1407 if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1408 goto found;
1411 error ("undefined named operand %qs", identifier_to_locale (p));
1412 op = 0;
1414 found:
1415 /* Replace the name with the number. Unfortunately, not all libraries
1416 get the return value of sprintf correct, so search for the end of the
1417 generated string by hand. */
1418 sprintf (--p, "%d", op);
1419 p = strchr (p, '\0');
1421 /* Verify the no extra buffer space assumption. */
1422 gcc_assert (p <= q);
1424 /* Shift the rest of the buffer down to fill the gap. */
1425 memmove (p, q + 1, strlen (q + 1) + 1);
1427 return p;
1430 /* Generate RTL to return from the current function, with no value.
1431 (That is, we do not do anything about returning any value.) */
1433 void
1434 expand_null_return (void)
1436 /* If this function was declared to return a value, but we
1437 didn't, clobber the return registers so that they are not
1438 propagated live to the rest of the function. */
1439 clobber_return_register ();
1441 expand_null_return_1 ();
1444 /* Generate RTL to return directly from the current function.
1445 (That is, we bypass any return value.) */
1447 void
1448 expand_naked_return (void)
1450 rtx end_label;
1452 clear_pending_stack_adjust ();
1453 do_pending_stack_adjust ();
1455 end_label = naked_return_label;
1456 if (end_label == 0)
1457 end_label = naked_return_label = gen_label_rtx ();
1459 emit_jump (end_label);
1462 /* Generate RTL to return from the current function, with value VAL. */
1464 static void
1465 expand_value_return (rtx val)
1467 /* Copy the value to the return location unless it's already there. */
1469 tree decl = DECL_RESULT (current_function_decl);
1470 rtx return_reg = DECL_RTL (decl);
1471 if (return_reg != val)
1473 tree funtype = TREE_TYPE (current_function_decl);
1474 tree type = TREE_TYPE (decl);
1475 int unsignedp = TYPE_UNSIGNED (type);
1476 enum machine_mode old_mode = DECL_MODE (decl);
1477 enum machine_mode mode;
1478 if (DECL_BY_REFERENCE (decl))
1479 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
1480 else
1481 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
1483 if (mode != old_mode)
1484 val = convert_modes (mode, old_mode, val, unsignedp);
1486 if (GET_CODE (return_reg) == PARALLEL)
1487 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1488 else
1489 emit_move_insn (return_reg, val);
1492 expand_null_return_1 ();
1495 /* Output a return with no value. */
1497 static void
1498 expand_null_return_1 (void)
1500 clear_pending_stack_adjust ();
1501 do_pending_stack_adjust ();
1502 emit_jump (return_label);
1505 /* Generate RTL to evaluate the expression RETVAL and return it
1506 from the current function. */
1508 void
1509 expand_return (tree retval)
1511 rtx result_rtl;
1512 rtx val = 0;
1513 tree retval_rhs;
1515 /* If function wants no value, give it none. */
1516 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
1518 expand_normal (retval);
1519 expand_null_return ();
1520 return;
1523 if (retval == error_mark_node)
1525 /* Treat this like a return of no value from a function that
1526 returns a value. */
1527 expand_null_return ();
1528 return;
1530 else if ((TREE_CODE (retval) == MODIFY_EXPR
1531 || TREE_CODE (retval) == INIT_EXPR)
1532 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
1533 retval_rhs = TREE_OPERAND (retval, 1);
1534 else
1535 retval_rhs = retval;
1537 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
1539 /* If we are returning the RESULT_DECL, then the value has already
1540 been stored into it, so we don't have to do anything special. */
1541 if (TREE_CODE (retval_rhs) == RESULT_DECL)
1542 expand_value_return (result_rtl);
1544 /* If the result is an aggregate that is being returned in one (or more)
1545 registers, load the registers here. */
1547 else if (retval_rhs != 0
1548 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
1549 && REG_P (result_rtl))
1551 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
1552 if (val)
1554 /* Use the mode of the result value on the return register. */
1555 PUT_MODE (result_rtl, GET_MODE (val));
1556 expand_value_return (val);
1558 else
1559 expand_null_return ();
1561 else if (retval_rhs != 0
1562 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
1563 && (REG_P (result_rtl)
1564 || (GET_CODE (result_rtl) == PARALLEL)))
1566 /* Calculate the return value into a temporary (usually a pseudo
1567 reg). */
1568 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
1569 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
1571 val = assign_temp (nt, 0, 1);
1572 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
1573 val = force_not_mem (val);
1574 /* Return the calculated value. */
1575 expand_value_return (val);
1577 else
1579 /* No hard reg used; calculate value into hard return reg. */
1580 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
1581 expand_value_return (result_rtl);
1585 /* Emit code to restore vital registers at the beginning of a nonlocal goto
1586 handler. */
1587 static void
1588 expand_nl_goto_receiver (void)
1590 rtx chain;
1592 /* Clobber the FP when we get here, so we have to make sure it's
1593 marked as used by this function. */
1594 emit_use (hard_frame_pointer_rtx);
1596 /* Mark the static chain as clobbered here so life information
1597 doesn't get messed up for it. */
1598 chain = targetm.calls.static_chain (current_function_decl, true);
1599 if (chain && REG_P (chain))
1600 emit_clobber (chain);
1602 #ifdef HAVE_nonlocal_goto
1603 if (! HAVE_nonlocal_goto)
1604 #endif
1606 /* First adjust our frame pointer to its actual value. It was
1607 previously set to the start of the virtual area corresponding to
1608 the stacked variables when we branched here and now needs to be
1609 adjusted to the actual hardware fp value.
1611 Assignments to virtual registers are converted by
1612 instantiate_virtual_regs into the corresponding assignment
1613 to the underlying register (fp in this case) that makes
1614 the original assignment true.
1615 So the following insn will actually be decrementing fp by
1616 STARTING_FRAME_OFFSET. */
1617 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
1619 /* Restoring the frame pointer also modifies the hard frame pointer.
1620 Mark it used (so that the previous assignment remains live once
1621 the frame pointer is eliminated) and clobbered (to represent the
1622 implicit update from the assignment). */
1623 emit_use (hard_frame_pointer_rtx);
1624 emit_clobber (hard_frame_pointer_rtx);
1627 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
1628 if (fixed_regs[ARG_POINTER_REGNUM])
1630 #ifdef ELIMINABLE_REGS
1631 /* If the argument pointer can be eliminated in favor of the
1632 frame pointer, we don't need to restore it. We assume here
1633 that if such an elimination is present, it can always be used.
1634 This is the case on all known machines; if we don't make this
1635 assumption, we do unnecessary saving on many machines. */
1636 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1637 size_t i;
1639 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1640 if (elim_regs[i].from == ARG_POINTER_REGNUM
1641 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1642 break;
1644 if (i == ARRAY_SIZE (elim_regs))
1645 #endif
1647 /* Now restore our arg pointer from the address at which it
1648 was saved in our stack frame. */
1649 emit_move_insn (crtl->args.internal_arg_pointer,
1650 copy_to_reg (get_arg_pointer_save_area ()));
1653 #endif
1655 #ifdef HAVE_nonlocal_goto_receiver
1656 if (HAVE_nonlocal_goto_receiver)
1657 emit_insn (gen_nonlocal_goto_receiver ());
1658 #endif
1660 /* We must not allow the code we just generated to be reordered by
1661 scheduling. Specifically, the update of the frame pointer must
1662 happen immediately, not later. */
1663 emit_insn (gen_blockage ());
1666 /* Emit code to save the current value of stack. */
1668 expand_stack_save (void)
1670 rtx ret = NULL_RTX;
1672 do_pending_stack_adjust ();
1673 emit_stack_save (SAVE_BLOCK, &ret);
1674 return ret;
1677 /* Emit code to restore the current value of stack. */
1678 void
1679 expand_stack_restore (tree var)
1681 rtx prev, sa = expand_normal (var);
1683 sa = convert_memory_address (Pmode, sa);
1685 prev = get_last_insn ();
1686 emit_stack_restore (SAVE_BLOCK, sa);
1687 fixup_args_size_notes (prev, get_last_insn (), 0);
1690 /* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE. PROB
1691 is the probability of jumping to LABEL. */
1692 static void
1693 do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
1694 int unsignedp, int prob)
1696 gcc_assert (prob <= REG_BR_PROB_BASE);
1697 do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
1698 NULL_RTX, NULL_RTX, label, prob);
1701 /* Do the insertion of a case label into case_list. The labels are
1702 fed to us in descending order from the sorted vector of case labels used
1703 in the tree part of the middle end. So the list we construct is
1704 sorted in ascending order.
1706 LABEL is the case label to be inserted. LOW and HIGH are the bounds
1707 against which the index is compared to jump to LABEL and PROB is the
1708 estimated probability LABEL is reached from the switch statement. */
1710 static struct case_node *
1711 add_case_node (struct case_node *head, tree low, tree high,
1712 tree label, int prob, alloc_pool case_node_pool)
1714 struct case_node *r;
1716 gcc_checking_assert (low);
1717 gcc_checking_assert (high && (TREE_TYPE (low) == TREE_TYPE (high)));
1719 /* Add this label to the chain. */
1720 r = (struct case_node *) pool_alloc (case_node_pool);
1721 r->low = low;
1722 r->high = high;
1723 r->code_label = label;
1724 r->parent = r->left = NULL;
1725 r->prob = prob;
1726 r->subtree_prob = prob;
1727 r->right = head;
1728 return r;
1731 /* Dump ROOT, a list or tree of case nodes, to file. */
1733 static void
1734 dump_case_nodes (FILE *f, struct case_node *root,
1735 int indent_step, int indent_level)
1737 HOST_WIDE_INT low, high;
1739 if (root == 0)
1740 return;
1741 indent_level++;
1743 dump_case_nodes (f, root->left, indent_step, indent_level);
1745 low = tree_low_cst (root->low, 0);
1746 high = tree_low_cst (root->high, 0);
1748 fputs (";; ", f);
1749 if (high == low)
1750 fprintf(f, "%*s" HOST_WIDE_INT_PRINT_DEC,
1751 indent_step * indent_level, "", low);
1752 else
1753 fprintf(f, "%*s" HOST_WIDE_INT_PRINT_DEC " ... " HOST_WIDE_INT_PRINT_DEC,
1754 indent_step * indent_level, "", low, high);
1755 fputs ("\n", f);
1757 dump_case_nodes (f, root->right, indent_step, indent_level);
1760 #ifndef HAVE_casesi
1761 #define HAVE_casesi 0
1762 #endif
1764 #ifndef HAVE_tablejump
1765 #define HAVE_tablejump 0
1766 #endif
1768 /* Return the smallest number of different values for which it is best to use a
1769 jump-table instead of a tree of conditional branches. */
1771 static unsigned int
1772 case_values_threshold (void)
1774 unsigned int threshold = PARAM_VALUE (PARAM_CASE_VALUES_THRESHOLD);
1776 if (threshold == 0)
1777 threshold = targetm.case_values_threshold ();
1779 return threshold;
1782 /* Return true if a switch should be expanded as a decision tree.
1783 RANGE is the difference between highest and lowest case.
1784 UNIQ is number of unique case node targets, not counting the default case.
1785 COUNT is the number of comparisons needed, not counting the default case. */
1787 static bool
1788 expand_switch_as_decision_tree_p (tree range,
1789 unsigned int uniq ATTRIBUTE_UNUSED,
1790 unsigned int count)
1792 int max_ratio;
1794 /* If neither casesi nor tablejump is available, or flag_jump_tables
1795 over-ruled us, we really have no choice. */
1796 if (!HAVE_casesi && !HAVE_tablejump)
1797 return true;
1798 if (!flag_jump_tables)
1799 return true;
1800 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
1801 if (flag_pic)
1802 return true;
1803 #endif
1805 /* If the switch is relatively small such that the cost of one
1806 indirect jump on the target is higher than the cost of a
1807 decision tree, go with the decision tree.
1809 If range of values is much bigger than number of values,
1810 or if it is too large to represent in a HOST_WIDE_INT,
1811 make a sequence of conditional branches instead of a dispatch.
1813 The definition of "much bigger" depends on whether we are
1814 optimizing for size or for speed. If the former, the maximum
1815 ratio range/count = 3, because this was found to be the optimal
1816 ratio for size on i686-pc-linux-gnu, see PR11823. The ratio
1817 10 is much older, and was probably selected after an extensive
1818 benchmarking investigation on numerous platforms. Or maybe it
1819 just made sense to someone at some point in the history of GCC,
1820 who knows... */
1821 max_ratio = optimize_insn_for_size_p () ? 3 : 10;
1822 if (count < case_values_threshold ()
1823 || ! host_integerp (range, /*pos=*/1)
1824 || compare_tree_int (range, max_ratio * count) > 0)
1825 return true;
1827 return false;
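/* A worked example of the heuristic above (illustrative only): for case
   values {0, 2, 3, 100}, COUNT is 4 and RANGE is 100.  When optimizing
   for speed max_ratio is 10, so RANGE is compared against 10 * 4 = 40;
   100 > 40, and the switch is expanded as a decision tree.  For the dense
   values {0 .. 9}, RANGE is 9, well below 10 * 10 = 100, so a dispatch
   table is considered instead, provided COUNT is not below
   case_values_threshold ().  */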
1830 /* Generate a decision tree, switching on INDEX_EXPR and jumping to
1831 one of the labels in CASE_LIST or to the DEFAULT_LABEL.
1832 DEFAULT_PROB is the estimated probability that it jumps to
1833 DEFAULT_LABEL.
1835 We generate a binary decision tree to select the appropriate target
1836 code. This is done as follows:
1838 If the index is a short or char that we do not have
1839 an insn to handle comparisons directly, convert it to
1840 a full integer now, rather than letting each comparison
1841 generate the conversion.
1843 Load the index into a register.
1845 The list of cases is rearranged into a binary tree,
1846 nearly optimal assuming equal probability for each case.
1848 The tree is transformed into RTL, eliminating redundant
1849 test conditions at the same time.
1851 If program flow could reach the end of the decision tree
1852 an unconditional jump to the default code is emitted.
1854 The above process is unaware of the CFG. The caller has to fix up
1855 the CFG itself. This is done in cfgexpand.c. */
1857 static void
1858 emit_case_decision_tree (tree index_expr, tree index_type,
1859 struct case_node *case_list, rtx default_label,
1860 int default_prob)
1862 rtx index = expand_normal (index_expr);
1864 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
1865 && ! have_insn_for (COMPARE, GET_MODE (index)))
1867 int unsignedp = TYPE_UNSIGNED (index_type);
1868 enum machine_mode wider_mode;
1869 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
1870 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
1871 if (have_insn_for (COMPARE, wider_mode))
1873 index = convert_to_mode (wider_mode, index, unsignedp);
1874 break;
1878 do_pending_stack_adjust ();
1880 if (MEM_P (index))
1882 index = copy_to_reg (index);
1883 if (TREE_CODE (index_expr) == SSA_NAME)
1884 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (index_expr), index);
1887 balance_case_nodes (&case_list, NULL);
1889 if (dump_file && (dump_flags & TDF_DETAILS))
1891 int indent_step = ceil_log2 (TYPE_PRECISION (index_type)) + 2;
1892 fprintf (dump_file, ";; Expanding GIMPLE switch as decision tree:\n");
1893 dump_case_nodes (dump_file, case_list, indent_step, 0);
1896 emit_case_nodes (index, case_list, default_label, default_prob, index_type);
1897 if (default_label)
1898 emit_jump (default_label);
1901 /* Return the sum of probabilities of outgoing edges of basic block BB. */
1903 static int
1904 get_outgoing_edge_probs (basic_block bb)
1906 edge e;
1907 edge_iterator ei;
1908 int prob_sum = 0;
1909 if (!bb)
1910 return 0;
1911 FOR_EACH_EDGE(e, ei, bb->succs)
1912 prob_sum += e->probability;
1913 return prob_sum;
1916 /* Computes the conditional probability of jumping to a target if the branch
1917 instruction is executed.
1918 TARGET_PROB is the estimated probability of jumping to a target relative
1919 to some basic block BB.
1920 BASE_PROB is the probability of reaching the branch instruction relative
1921 to the same basic block BB. */
1923 static inline int
1924 conditional_probability (int target_prob, int base_prob)
1926 if (base_prob > 0)
1928 gcc_assert (target_prob >= 0);
1929 gcc_assert (target_prob <= base_prob);
1930 return RDIV (target_prob * REG_BR_PROB_BASE, base_prob);
1932 return -1;
1935 /* Generate a dispatch table, switching on INDEX_EXPR and jumping to
1936 one of the labels in CASE_LIST or to the DEFAULT_LABEL.
1937 MINVAL, MAXVAL, and RANGE are the extrema and range of the case
1938 labels in CASE_LIST. STMT_BB is the basic block containing the statement.
1940 First, a jump insn is emitted: we try "casesi" first, and if that
1941 fails, we try "tablejump". A target *must* have one of them (or both).
1943 Then, a table with the target labels is emitted.
1945 The process is unaware of the CFG. The caller has to fix up
1946 the CFG itself. This is done in cfgexpand.c. */
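/* Illustrative sketch (not part of the original source): for a dense
   switch over the values 0..3, the emitted code corresponds roughly
   to the C pseudo-code

	if ((unsigned) index > 3)	   <-- emitted by casesi/tablejump
	  goto Ldefault;
	goto *table[index];

   where "table" is the vector of the four case labels, emitted after
   TABLE_LABEL as an ADDR_VEC or, when the case vector is PC-relative
   or -fpic is in effect, as an ADDR_DIFF_VEC.  */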
1948 static void
1949 emit_case_dispatch_table (tree index_expr, tree index_type,
1950 struct case_node *case_list, rtx default_label,
1951 tree minval, tree maxval, tree range,
1952 basic_block stmt_bb)
1954 int i, ncases;
1955 struct case_node *n;
1956 rtx *labelvec;
1957 rtx fallback_label = label_rtx (case_list->code_label);
1958 rtx table_label = gen_label_rtx ();
1959 bool has_gaps = false;
1960 edge default_edge = stmt_bb ? EDGE_SUCC (stmt_bb, 0) : NULL;
1961 int default_prob = default_edge ? default_edge->probability : 0;
1962 int base = get_outgoing_edge_probs (stmt_bb);
1963 bool try_with_tablejump = false;
1965 int new_default_prob = conditional_probability (default_prob,
1966 base);
1968 if (! try_casesi (index_type, index_expr, minval, range,
1969 table_label, default_label, fallback_label,
1970 new_default_prob))
1972 /* Index jumptables from zero for suitable values of minval to avoid
1973 a subtraction. For the rationale see:
1974 "http://gcc.gnu.org/ml/gcc-patches/2001-10/msg01234.html". */
1975 if (optimize_insn_for_speed_p ()
1976 && compare_tree_int (minval, 0) > 0
1977 && compare_tree_int (minval, 3) < 0)
1979 minval = build_int_cst (index_type, 0);
1980 range = maxval;
1981 has_gaps = true;
1983 try_with_tablejump = true;
1986 /* Get table of labels to jump to, in order of case index. */
1988 ncases = tree_low_cst (range, 0) + 1;
1989 labelvec = XALLOCAVEC (rtx, ncases);
1990 memset (labelvec, 0, ncases * sizeof (rtx));
1992 for (n = case_list; n; n = n->right)
1994 /* Compute the low and high bounds relative to the minimum
1995 value since that should fit in a HOST_WIDE_INT while the
1996 actual values may not. */
1997 HOST_WIDE_INT i_low
1998 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
1999 n->low, minval), 1);
2000 HOST_WIDE_INT i_high
2001 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2002 n->high, minval), 1);
2003 HOST_WIDE_INT i;
2005 for (i = i_low; i <= i_high; i ++)
2006 labelvec[i]
2007 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
2010 /* Fill in the gaps with the default. We may have gaps at
2011 the beginning if we tried to avoid the minval subtraction,
2012 so substitute some label even if the default label was
2013 deemed unreachable. */
2014 if (!default_label)
2015 default_label = fallback_label;
2016 for (i = 0; i < ncases; i++)
2017 if (labelvec[i] == 0)
2019 has_gaps = true;
2020 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
2023 if (has_gaps)
2025 /* There is at least one entry in the jump table that jumps
2026 to default label. The default label can either be reached
2027 through the indirect jump or the direct conditional jump
2028 before that. Split the probability of reaching the
2029 default label among these two jumps. */
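      /* Worked example (illustrative numbers only): with DEFAULT_PROB
	 == 4000 and BASE == 10000, half of the default probability is
	 kept on the default edge (reached through the table's gap
	 entries) and the other half, conditional_probability (2000,
	 10000), is attributed to the range-check jump emitted before
	 the indirect jump; BASE drops to 8000 for the renormalization
	 of the remaining edges below.  */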
2030 new_default_prob = conditional_probability (default_prob/2,
2031 base);
2032 default_prob /= 2;
2033 base -= default_prob;
2035 else
2037 base -= default_prob;
2038 default_prob = 0;
2041 if (default_edge)
2042 default_edge->probability = default_prob;
2044 /* We have altered the probability of the default edge. So the probabilities
2045 of all other edges need to be adjusted so that they sum up to
2046 REG_BR_PROB_BASE. */
2047 if (base)
2049 edge e;
2050 edge_iterator ei;
2051 FOR_EACH_EDGE (e, ei, stmt_bb->succs)
2052 e->probability = RDIV (e->probability * REG_BR_PROB_BASE, base);
2055 if (try_with_tablejump)
2057 bool ok = try_tablejump (index_type, index_expr, minval, range,
2058 table_label, default_label, new_default_prob);
2059 gcc_assert (ok);
2061 /* Output the table. */
2062 emit_label (table_label);
2064 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
2065 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
2066 gen_rtx_LABEL_REF (Pmode, table_label),
2067 gen_rtvec_v (ncases, labelvec),
2068 const0_rtx, const0_rtx));
2069 else
2070 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
2071 gen_rtvec_v (ncases, labelvec)));
2073 /* Record no drop-through after the table. */
2074 emit_barrier ();
2077 /* Reset the aux field of all outgoing edges of basic block BB. */
2079 static inline void
2080 reset_out_edges_aux (basic_block bb)
2082 edge e;
2083 edge_iterator ei;
2084 FOR_EACH_EDGE (e, ei, bb->succs)
2085 e->aux = (void *)0;
2088 /* Compute the number of case labels that correspond to each outgoing edge of
2089 STMT. Record this information in the aux field of the edge. */
2091 static inline void
2092 compute_cases_per_edge (gimple stmt)
2094 basic_block bb = gimple_bb (stmt);
2095 reset_out_edges_aux (bb);
2096 int ncases = gimple_switch_num_labels (stmt);
2097 for (int i = ncases - 1; i >= 1; --i)
2099 tree elt = gimple_switch_label (stmt, i);
2100 tree lab = CASE_LABEL (elt);
2101 basic_block case_bb = label_to_block_fn (cfun, lab);
2102 edge case_edge = find_edge (bb, case_bb);
2103 case_edge->aux = (void *)((intptr_t)(case_edge->aux) + 1);
2107 /* Terminate a case (Pascal/Ada) or switch (C) statement, given as
2108 the GIMPLE_SWITCH statement STMT whose index expression is to be
2109 tested. The original source type of that index is usually lost to
2110 compiler conversions, but the case labels retain it.
2111 Generate the code to test the index and jump to the right place. */
2113 void
2114 expand_case (gimple stmt)
2116 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
2117 rtx default_label = NULL_RTX;
2118 unsigned int count, uniq;
2119 int i;
2120 int ncases = gimple_switch_num_labels (stmt);
2121 tree index_expr = gimple_switch_index (stmt);
2122 tree index_type = TREE_TYPE (index_expr);
2123 tree elt;
2124 basic_block bb = gimple_bb (stmt);
2126 /* A list of case labels; it is first built as a list and it may then
2127 be rearranged into a nearly balanced binary tree. */
2128 struct case_node *case_list = 0;
2130 /* A pool for case nodes. */
2131 alloc_pool case_node_pool;
2133 /* An ERROR_MARK occurs for various reasons including invalid data type.
2134 ??? Can this still happen, with GIMPLE and all? */
2135 if (index_type == error_mark_node)
2136 return;
2138 /* cleanup_tree_cfg removes all SWITCH_EXPRs whose index
2139 expression is an INTEGER_CST. */
2140 gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);
2142 case_node_pool = create_alloc_pool ("struct case_node pool",
2143 sizeof (struct case_node),
2144 100);
2146 do_pending_stack_adjust ();
2148 /* Find the default case target label. */
2149 default_label = label_rtx (CASE_LABEL (gimple_switch_default_label (stmt)));
2150 edge default_edge = EDGE_SUCC (bb, 0);
2151 int default_prob = default_edge->probability;
2153 /* Get upper and lower bounds of case values. */
2154 elt = gimple_switch_label (stmt, 1);
2155 minval = fold_convert (index_type, CASE_LOW (elt));
2156 elt = gimple_switch_label (stmt, ncases - 1);
2157 if (CASE_HIGH (elt))
2158 maxval = fold_convert (index_type, CASE_HIGH (elt));
2159 else
2160 maxval = fold_convert (index_type, CASE_LOW (elt));
2162 /* Compute span of values. */
2163 range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);
2165 /* Listify the labels queue and gather some numbers to decide
2166 how to expand this switch(). */
2167 uniq = 0;
2168 count = 0;
2169 struct pointer_set_t *seen_labels = pointer_set_create ();
2170 compute_cases_per_edge (stmt);
2172 for (i = ncases - 1; i >= 1; --i)
2174 elt = gimple_switch_label (stmt, i);
2175 tree low = CASE_LOW (elt);
2176 gcc_assert (low);
2177 tree high = CASE_HIGH (elt);
2178 gcc_assert (! high || tree_int_cst_lt (low, high));
2179 tree lab = CASE_LABEL (elt);
2181 /* Count the elements.
2182 A range counts double, since it requires two compares. */
2183 count++;
2184 if (high)
2185 count++;
2187 /* If we have not seen this label yet, then increase the
2188 number of unique case node targets seen. */
2189 if (!pointer_set_insert (seen_labels, lab))
2190 uniq++;
2192 /* The bounds on the case range, LOW and HIGH, have to be converted
2193 to case's index type TYPE. Note that the original type of the
2194 case index in the source code is usually "lost" during
2195 gimplification due to type promotion, but the case labels retain the
2196 original type. Make sure to drop overflow flags. */
2197 low = fold_convert (index_type, low);
2198 if (TREE_OVERFLOW (low))
2199 low = build_int_cst_wide (index_type,
2200 TREE_INT_CST_LOW (low),
2201 TREE_INT_CST_HIGH (low));
2203 /* The canonical form of a case label in GIMPLE is that a simple case
2204 has an empty CASE_HIGH. For the casesi and tablejump expanders,
2205 the back ends want simple cases to have high == low. */
2206 if (! high)
2207 high = low;
2208 high = fold_convert (index_type, high);
2209 if (TREE_OVERFLOW (high))
2210 high = build_int_cst_wide (index_type,
2211 TREE_INT_CST_LOW (high),
2212 TREE_INT_CST_HIGH (high));
2214 basic_block case_bb = label_to_block_fn (cfun, lab);
2215 edge case_edge = find_edge (bb, case_bb);
2216 case_list = add_case_node (
2217 case_list, low, high, lab,
2218 case_edge->probability / (intptr_t)(case_edge->aux),
2219 case_node_pool);
2221 pointer_set_destroy (seen_labels);
2222 reset_out_edges_aux (bb);
2224 /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
2225 destination, such as one with a default case only.
2226 It also removes cases that are out of range for the switch
2227 type, so we should never get a zero here. */
2228 gcc_assert (count > 0);
2230 rtx before_case = get_last_insn ();
2232 /* Decide how to expand this switch.
2233 The two options at this point are a dispatch table (casesi or
2234 tablejump) or a decision tree. */
2236 if (expand_switch_as_decision_tree_p (range, uniq, count))
2237 emit_case_decision_tree (index_expr, index_type,
2238 case_list, default_label,
2239 default_prob);
2240 else
2241 emit_case_dispatch_table (index_expr, index_type,
2242 case_list, default_label,
2243 minval, maxval, range, bb);
2245 reorder_insns (NEXT_INSN (before_case), get_last_insn (), before_case);
2247 free_temp_slots ();
2248 free_alloc_pool (case_node_pool);
2251 /* Expand the dispatch to a short decrement chain if there are few cases
2252 to dispatch to. Likewise if neither casesi nor tablejump is available,
2253 or if flag_jump_tables is not set. Otherwise, expand as a casesi or a
2254 tablejump. The index mode is always the mode of integer_type_node.
2255 Trap if no case matches the index.
2257 DISPATCH_INDEX is the index expression to switch on. It should be a
2258 memory or register operand.
2260 DISPATCH_TABLE is a set of case labels. The set should be sorted in
2261 ascending order, be contiguous, starting with value 0, and contain only
2262 single-valued case labels. */
2264 void
2265 expand_sjlj_dispatch_table (rtx dispatch_index,
2266 vec<tree> dispatch_table)
2268 tree index_type = integer_type_node;
2269 enum machine_mode index_mode = TYPE_MODE (index_type);
2271 int ncases = dispatch_table.length ();
2273 do_pending_stack_adjust ();
2274 rtx before_case = get_last_insn ();
2276 /* Expand as a decrement-chain if there are 5 or fewer dispatch
2277 labels. This covers more than 98% of the cases in libjava,
2278 and seems to be a reasonable compromise between the "old way"
2279 of expanding as a decision tree or dispatch table vs. the "new
2280 way" with decrement chain or dispatch table. */
2281 if (dispatch_table.length () <= 5
2282 || (!HAVE_casesi && !HAVE_tablejump)
2283 || !flag_jump_tables)
2285 /* Expand the dispatch as a decrement chain:
2287 "switch(index) {case 0: do_0; case 1: do_1; ...; case N: do_N;}"
2291 if (index == 0) do_0; else index--;
2292 if (index == 0) do_1; else index--;
2294 if (index == 0) do_N; else index--;
2296 This is more efficient than a dispatch table on most machines.
2297 The last "index--" is redundant but the code is trivially dead
2298 and will be cleaned up by later passes. */
2299 rtx index = copy_to_mode_reg (index_mode, dispatch_index);
2300 rtx zero = CONST0_RTX (index_mode);
2301 for (int i = 0; i < ncases; i++)
2303 tree elt = dispatch_table[i];
2304 rtx lab = label_rtx (CASE_LABEL (elt));
2305 do_jump_if_equal (index_mode, index, zero, lab, 0, -1);
2306 force_expand_binop (index_mode, sub_optab,
2307 index, CONST1_RTX (index_mode),
2308 index, 0, OPTAB_DIRECT);
2311 else
2313 /* Similar to expand_case, but much simpler. */
2314 struct case_node *case_list = 0;
2315 alloc_pool case_node_pool = create_alloc_pool ("struct sjlj_case pool",
2316 sizeof (struct case_node),
2317 ncases);
2318 tree index_expr = make_tree (index_type, dispatch_index);
2319 tree minval = build_int_cst (index_type, 0);
2320 tree maxval = CASE_LOW (dispatch_table.last ());
2321 tree range = maxval;
2322 rtx default_label = gen_label_rtx ();
2324 for (int i = ncases - 1; i >= 0; --i)
2326 tree elt = dispatch_table[i];
2327 tree low = CASE_LOW (elt);
2328 tree lab = CASE_LABEL (elt);
2329 case_list = add_case_node (case_list, low, low, lab, 0, case_node_pool);
2332 emit_case_dispatch_table (index_expr, index_type,
2333 case_list, default_label,
2334 minval, maxval, range,
2335 BLOCK_FOR_INSN (before_case));
2336 emit_label (default_label);
2337 free_alloc_pool (case_node_pool);
2340 /* Dispatching something not handled? Trap! */
2341 expand_builtin_trap ();
2343 reorder_insns (NEXT_INSN (before_case), get_last_insn (), before_case);
2345 free_temp_slots ();
2349 /* Take an ordered list of case nodes
2350 and transform them into a near optimal binary tree,
2351 on the assumption that any target code selection value is as
2352 likely as any other.
2354 The transformation is performed by splitting the ordered
2355 list into two equal sections plus a pivot. The parts are
2356 then attached to the pivot as left and right branches. Each
2357 branch is then transformed recursively. */
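/* Illustrative example (not part of the original source): the ordered
   chain 1 -> 2 -> 3 -> 4 -> 5 is split at the pivot 3, giving

		3
	       / \
	      1   4
	       \   \
		2   5

   after which each half is balanced recursively.  A range label such
   as 4 ... 7 counts twice when choosing the pivot, because it will
   need two comparisons once expanded.  */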
2359 static void
2360 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
2362 case_node_ptr np;
2364 np = *head;
2365 if (np)
2367 int i = 0;
2368 int ranges = 0;
2369 case_node_ptr *npp;
2370 case_node_ptr left;
2372 /* Count the number of entries on branch. Also count the ranges. */
2374 while (np)
2376 if (!tree_int_cst_equal (np->low, np->high))
2377 ranges++;
2379 i++;
2380 np = np->right;
2383 if (i > 2)
2385 /* Split this list if it is long enough for that to help. */
2386 npp = head;
2387 left = *npp;
2389 /* If there are just three nodes, split at the middle one. */
2390 if (i == 3)
2391 npp = &(*npp)->right;
2392 else
2394 /* Find the place in the list that bisects the list's total cost,
2395 where ranges count as 2.
2396 Here I gets half the total cost. */
2397 i = (i + ranges + 1) / 2;
2398 while (1)
2400 /* Skip nodes while their cost does not reach that amount. */
2401 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2402 i--;
2403 i--;
2404 if (i <= 0)
2405 break;
2406 npp = &(*npp)->right;
2409 *head = np = *npp;
2410 *npp = 0;
2411 np->parent = parent;
2412 np->left = left;
2414 /* Optimize each of the two split parts. */
2415 balance_case_nodes (&np->left, np);
2416 balance_case_nodes (&np->right, np);
2417 np->subtree_prob = np->prob;
2418 np->subtree_prob += np->left->subtree_prob;
2419 np->subtree_prob += np->right->subtree_prob;
2421 else
2423 /* Else leave this branch as one level,
2424 but fill in `parent' fields. */
2425 np = *head;
2426 np->parent = parent;
2427 np->subtree_prob = np->prob;
2428 for (; np->right; np = np->right)
2430 np->right->parent = np;
2431 (*head)->subtree_prob += np->right->subtree_prob;
2437 /* Search the parent sections of the case node tree
2438 to see if a test for the lower bound of NODE would be redundant.
2439 INDEX_TYPE is the type of the index expression.
2441 The instructions to generate the case decision tree are
2442 output in the same order as nodes are processed so it is
2443 known that if a parent node checks the range of the current
2444 node minus one that the current node is bounded at its lower
2445 span. Thus the test would be redundant. */
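/* Illustrative example (not part of the original source): when the
   node for "case 5" is reached only after a parent node has already
   handled the range "case 1 ... 4", that parent's HIGH equals 5 - 1,
   so an explicit lower-bound test "index >= 5" would be redundant and
   is omitted.  node_has_high_bound below applies the symmetric
   reasoning to the upper bound.  */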
2447 static int
2448 node_has_low_bound (case_node_ptr node, tree index_type)
2450 tree low_minus_one;
2451 case_node_ptr pnode;
2453 /* If the lower bound of this node is the lowest value in the index type,
2454 we need not test it. */
2456 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
2457 return 1;
2459 /* If this node has a left branch, the value at the left must be less
2460 than that at this node, so it cannot be bounded at the bottom and
2461 we need not bother testing any further. */
2463 if (node->left)
2464 return 0;
2466 low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
2467 node->low,
2468 build_int_cst (TREE_TYPE (node->low), 1));
2470 /* If the subtraction above overflowed, we can't verify anything.
2471 Otherwise, look for a parent that tests our value - 1. */
2473 if (! tree_int_cst_lt (low_minus_one, node->low))
2474 return 0;
2476 for (pnode = node->parent; pnode; pnode = pnode->parent)
2477 if (tree_int_cst_equal (low_minus_one, pnode->high))
2478 return 1;
2480 return 0;
2483 /* Search the parent sections of the case node tree
2484 to see if a test for the upper bound of NODE would be redundant.
2485 INDEX_TYPE is the type of the index expression.
2487 The instructions to generate the case decision tree are
2488 output in the same order as nodes are processed so it is
2489 known that if a parent node checks the range of the current
2490 node plus one that the current node is bounded at its upper
2491 span. Thus the test would be redundant. */
2493 static int
2494 node_has_high_bound (case_node_ptr node, tree index_type)
2496 tree high_plus_one;
2497 case_node_ptr pnode;
2499 /* If there is no upper bound, obviously no test is needed. */
2501 if (TYPE_MAX_VALUE (index_type) == NULL)
2502 return 1;
2504 /* If the upper bound of this node is the highest value in the type
2505 of the index expression, we need not test against it. */
2507 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
2508 return 1;
2510 /* If this node has a right branch, the value at the right must be greater
2511 than that at this node, so it cannot be bounded at the top and
2512 we need not bother testing any further. */
2514 if (node->right)
2515 return 0;
2517 high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
2518 node->high,
2519 build_int_cst (TREE_TYPE (node->high), 1));
2521 /* If the addition above overflowed, we can't verify anything.
2522 Otherwise, look for a parent that tests our value + 1. */
2524 if (! tree_int_cst_lt (node->high, high_plus_one))
2525 return 0;
2527 for (pnode = node->parent; pnode; pnode = pnode->parent)
2528 if (tree_int_cst_equal (high_plus_one, pnode->low))
2529 return 1;
2531 return 0;
2534 /* Search the parent sections of the
2535 case node tree to see if both tests for the upper and lower
2536 bounds of NODE would be redundant. */
2538 static int
2539 node_is_bounded (case_node_ptr node, tree index_type)
2541 return (node_has_low_bound (node, index_type)
2542 && node_has_high_bound (node, index_type));
2546 /* Emit step-by-step code to select a case for the value of INDEX.
2547 The thus generated decision tree follows the form of the
2548 case-node binary tree NODE, whose nodes represent test conditions.
2549 INDEX_TYPE is the type of the index of the switch.
2551 Care is taken to prune redundant tests from the decision tree
2552 by detecting any boundary conditions already checked by
2553 emitted rtx. (See node_has_high_bound, node_has_low_bound
2554 and node_is_bounded, above.)
2556 Where the test conditions can be shown to be redundant we emit
2557 an unconditional jump to the target code. As a further
2558 optimization, the subordinates of a tree node are examined to
2559 check for bounded nodes. In this case conditional and/or
2560 unconditional jumps as a result of the boundary check for the
2561 current node are arranged to target the subordinates' associated
2562 code for out-of-bound conditions on the current node.
2564 We can assume that when control reaches the code generated here,
2565 the index value has already been compared with the parents
2566 of this node, and determined to be on the same side of each parent
2567 as this node is. Thus, if this node tests for the value 51,
2568 and a parent tested for 52, we don't need to consider
2569 the possibility of a value greater than 51. If another parent
2570 tests for the value 50, then this node need not test anything. */
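/* Illustrative sketch (not part of the original source): for a range
   node covering 10 ... 20 whose right subtree is known to be bounded,
   the emitted tests correspond roughly to the C pseudo-code

	if (index > 20) goto Lright_case;
	if (index >= 10) goto Lthis_case;
	... code for the left subtree ...

   the "index <= 20" half of the range test is pruned because the
   preceding comparison has already decided it, and the bounded right
   subtree is branched to directly, with no further tests of its own.  */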
2572 static void
2573 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
2574 int default_prob, tree index_type)
2576 /* If INDEX has an unsigned type, we must make unsigned branches. */
2577 int unsignedp = TYPE_UNSIGNED (index_type);
2578 int probability;
2579 int prob = node->prob, subtree_prob = node->subtree_prob;
2580 enum machine_mode mode = GET_MODE (index);
2581 enum machine_mode imode = TYPE_MODE (index_type);
2583 /* Handle indices detected as constant during RTL expansion. */
2584 if (mode == VOIDmode)
2585 mode = imode;
2587 /* See if our parents have already tested everything for us.
2588 If they have, emit an unconditional jump for this node. */
2589 if (node_is_bounded (node, index_type))
2590 emit_jump (label_rtx (node->code_label));
2592 else if (tree_int_cst_equal (node->low, node->high))
2594 probability = conditional_probability (prob, subtree_prob + default_prob);
2595 /* Node is single valued. First see if the index expression matches
2596 this node and then check our children, if any. */
2597 do_jump_if_equal (mode, index,
2598 convert_modes (mode, imode,
2599 expand_normal (node->low),
2600 unsignedp),
2601 label_rtx (node->code_label), unsignedp, probability);
2602 /* Since this case is taken at this point, reduce its weight from
2603 subtree_prob. */
2604 subtree_prob -= prob;
2605 if (node->right != 0 && node->left != 0)
2607 /* This node has children on both sides.
2608 Dispatch to one side or the other
2609 by comparing the index value with this node's value.
2610 If one subtree is bounded, check that one first,
2611 so we can avoid real branches in the tree. */
2613 if (node_is_bounded (node->right, index_type))
2615 probability = conditional_probability (
2616 node->right->prob,
2617 subtree_prob + default_prob);
2618 emit_cmp_and_jump_insns (index,
2619 convert_modes
2620 (mode, imode,
2621 expand_normal (node->high),
2622 unsignedp),
2623 GT, NULL_RTX, mode, unsignedp,
2624 label_rtx (node->right->code_label),
2625 probability);
2626 emit_case_nodes (index, node->left, default_label, default_prob,
2627 index_type);
2630 else if (node_is_bounded (node->left, index_type))
2632 probability = conditional_probability (
2633 node->left->prob,
2634 subtree_prob + default_prob);
2635 emit_cmp_and_jump_insns (index,
2636 convert_modes
2637 (mode, imode,
2638 expand_normal (node->high),
2639 unsignedp),
2640 LT, NULL_RTX, mode, unsignedp,
2641 label_rtx (node->left->code_label),
2642 probability);
2643 emit_case_nodes (index, node->right, default_label, default_prob, index_type);
2646 /* If both children are single-valued cases with no
2647 children, finish up all the work. This way, we can save
2648 one ordered comparison. */
2649 else if (tree_int_cst_equal (node->right->low, node->right->high)
2650 && node->right->left == 0
2651 && node->right->right == 0
2652 && tree_int_cst_equal (node->left->low, node->left->high)
2653 && node->left->left == 0
2654 && node->left->right == 0)
2656 /* Neither node is bounded, but both children are single-valued
2657 leaves, so just emit an equality test for each of them in turn. */
2659 /* See if the value matches what the right hand side
2660 wants. */
2661 probability = conditional_probability (
2662 node->right->prob,
2663 subtree_prob + default_prob);
2664 do_jump_if_equal (mode, index,
2665 convert_modes (mode, imode,
2666 expand_normal (node->right->low),
2667 unsignedp),
2668 label_rtx (node->right->code_label),
2669 unsignedp, probability);
2671 /* See if the value matches what the left hand side
2672 wants. */
2673 probability = conditional_probability (
2674 node->left->prob,
2675 subtree_prob + default_prob);
2676 do_jump_if_equal (mode, index,
2677 convert_modes (mode, imode,
2678 expand_normal (node->left->low),
2679 unsignedp),
2680 label_rtx (node->left->code_label),
2681 unsignedp, probability);
2684 else
2686 /* Neither node is bounded. First distinguish the two sides;
2687 then emit the code for one side at a time. */
2689 tree test_label
2690 = build_decl (curr_insn_location (),
2691 LABEL_DECL, NULL_TREE, NULL_TREE);
2693 /* The default label could be reached either through the right
2694 subtree or the left subtree. Divide the probability
2695 equally. */
2696 probability = conditional_probability (
2697 node->right->subtree_prob + default_prob/2,
2698 subtree_prob + default_prob);
2699 /* See if the value is on the right. */
2700 emit_cmp_and_jump_insns (index,
2701 convert_modes
2702 (mode, imode,
2703 expand_normal (node->high),
2704 unsignedp),
2705 GT, NULL_RTX, mode, unsignedp,
2706 label_rtx (test_label),
2707 probability);
2708 default_prob /= 2;
2710 /* Value must be on the left.
2711 Handle the left-hand subtree. */
2712 emit_case_nodes (index, node->left, default_label, default_prob, index_type);
2713 /* If left-hand subtree does nothing,
2714 go to default. */
2715 if (default_label)
2716 emit_jump (default_label);
2718 /* Code branches here for the right-hand subtree. */
2719 expand_label (test_label);
2720 emit_case_nodes (index, node->right, default_label, default_prob, index_type);
2724 else if (node->right != 0 && node->left == 0)
2726 /* Here we have a right child but no left so we issue a conditional
2727 branch to default and process the right child.
2729 Omit the conditional branch to default if the right child
2730 does not have any children and is single valued; it would
2731 cost too much space to save so little time. */
2733 if (node->right->right || node->right->left
2734 || !tree_int_cst_equal (node->right->low, node->right->high))
2736 if (!node_has_low_bound (node, index_type))
2738 probability = conditional_probability (
2739 default_prob/2,
2740 subtree_prob + default_prob);
2741 emit_cmp_and_jump_insns (index,
2742 convert_modes
2743 (mode, imode,
2744 expand_normal (node->high),
2745 unsignedp),
2746 LT, NULL_RTX, mode, unsignedp,
2747 default_label,
2748 probability);
2749 default_prob /= 2;
2752 emit_case_nodes (index, node->right, default_label, default_prob, index_type);
2754 else
2756 probability = conditional_probability (
2757 node->right->subtree_prob,
2758 subtree_prob + default_prob);
2759 /* We cannot process node->right normally
2760 since we haven't ruled out the numbers less than
2761 this node's value. So handle node->right explicitly. */
2762 do_jump_if_equal (mode, index,
2763 convert_modes
2764 (mode, imode,
2765 expand_normal (node->right->low),
2766 unsignedp),
2767 label_rtx (node->right->code_label), unsignedp, probability);
2771 else if (node->right == 0 && node->left != 0)
2773 /* Just one subtree, on the left. */
2774 if (node->left->left || node->left->right
2775 || !tree_int_cst_equal (node->left->low, node->left->high))
2777 if (!node_has_high_bound (node, index_type))
2779 probability = conditional_probability (
2780 default_prob/2,
2781 subtree_prob + default_prob);
2782 emit_cmp_and_jump_insns (index,
2783 convert_modes
2784 (mode, imode,
2785 expand_normal (node->high),
2786 unsignedp),
2787 GT, NULL_RTX, mode, unsignedp,
2788 default_label,
2789 probability);
2790 default_prob /= 2;
2793 emit_case_nodes (index, node->left, default_label,
2794 default_prob, index_type);
2796 else
2798 probability = conditional_probability (
2799 node->left->subtree_prob,
2800 subtree_prob + default_prob);
2801 /* We cannot process node->left normally
2802 since we haven't ruled out the numbers greater than
2803 this node's value. So handle node->left explicitly. */
2804 do_jump_if_equal (mode, index,
2805 convert_modes
2806 (mode, imode,
2807 expand_normal (node->left->low),
2808 unsignedp),
2809 label_rtx (node->left->code_label), unsignedp, probability);
2813 else
2815 /* Node is a range. These cases are very similar to those for a single
2816 value, except that we do not start by testing whether this node
2817 is the one to branch to. */
2819 if (node->right != 0 && node->left != 0)
2821 /* Node has subtrees on both sides.
2822 If the right-hand subtree is bounded,
2823 test for it first, since we can go straight there.
2824 Otherwise, we need to make a branch in the control structure,
2825 then handle the two subtrees. */
2826 tree test_label = 0;
2828 if (node_is_bounded (node->right, index_type))
2830 /* Right hand node is fully bounded so we can eliminate any
2831 testing and branch directly to the target code. */
2832 probability = conditional_probability (
2833 node->right->subtree_prob,
2834 subtree_prob + default_prob);
2835 emit_cmp_and_jump_insns (index,
2836 convert_modes
2837 (mode, imode,
2838 expand_normal (node->high),
2839 unsignedp),
2840 GT, NULL_RTX, mode, unsignedp,
2841 label_rtx (node->right->code_label),
2842 probability);
2844 else
2846 /* Right hand node requires testing.
2847 Branch to a label where we will handle it later. */
2849 test_label = build_decl (curr_insn_location (),
2850 LABEL_DECL, NULL_TREE, NULL_TREE);
2851 probability = conditional_probability (
2852 node->right->subtree_prob + default_prob/2,
2853 subtree_prob + default_prob);
2854 emit_cmp_and_jump_insns (index,
2855 convert_modes
2856 (mode, imode,
2857 expand_normal (node->high),
2858 unsignedp),
2859 GT, NULL_RTX, mode, unsignedp,
2860 label_rtx (test_label),
2861 probability);
2862 default_prob /= 2;
2865 /* Value belongs to this node or to the left-hand subtree. */
2867 probability = conditional_probability (
2868 prob,
2869 subtree_prob + default_prob);
2870 emit_cmp_and_jump_insns (index,
2871 convert_modes
2872 (mode, imode,
2873 expand_normal (node->low),
2874 unsignedp),
2875 GE, NULL_RTX, mode, unsignedp,
2876 label_rtx (node->code_label),
2877 probability);
2879 /* Handle the left-hand subtree. */
2880 emit_case_nodes (index, node->left, default_label, default_prob, index_type);
2882 /* If right node had to be handled later, do that now. */
2884 if (test_label)
2886 /* If the left-hand subtree fell through,
2887 don't let it fall into the right-hand subtree. */
2888 if (default_label)
2889 emit_jump (default_label);
2891 expand_label (test_label);
2892 emit_case_nodes (index, node->right, default_label, default_prob, index_type);
2896 else if (node->right != 0 && node->left == 0)
2898 /* Deal with values to the left of this node,
2899 if they are possible. */
2900 if (!node_has_low_bound (node, index_type))
2902 probability = conditional_probability (
2903 default_prob/2,
2904 subtree_prob + default_prob);
2905 emit_cmp_and_jump_insns (index,
2906 convert_modes
2907 (mode, imode,
2908 expand_normal (node->low),
2909 unsignedp),
2910 LT, NULL_RTX, mode, unsignedp,
2911 default_label,
2912 probability);
2913 default_prob /= 2;
2916 /* Value belongs to this node or to the right-hand subtree. */
2918 probability = conditional_probability (
2919 prob,
2920 subtree_prob + default_prob);
2921 emit_cmp_and_jump_insns (index,
2922 convert_modes
2923 (mode, imode,
2924 expand_normal (node->high),
2925 unsignedp),
2926 LE, NULL_RTX, mode, unsignedp,
2927 label_rtx (node->code_label),
2928 probability);
2930 emit_case_nodes (index, node->right, default_label, default_prob, index_type);
2933 else if (node->right == 0 && node->left != 0)
2935 /* Deal with values to the right of this node,
2936 if they are possible. */
2937 if (!node_has_high_bound (node, index_type))
2939 probability = conditional_probability (
2940 default_prob/2,
2941 subtree_prob + default_prob);
2942 emit_cmp_and_jump_insns (index,
2943 convert_modes
2944 (mode, imode,
2945 expand_normal (node->high),
2946 unsignedp),
2947 GT, NULL_RTX, mode, unsignedp,
2948 default_label,
2949 probability);
2950 default_prob /= 2;
2953 /* Value belongs to this node or to the left-hand subtree. */
2955 probability = conditional_probability (
2956 prob,
2957 subtree_prob + default_prob);
2958 emit_cmp_and_jump_insns (index,
2959 convert_modes
2960 (mode, imode,
2961 expand_normal (node->low),
2962 unsignedp),
2963 GE, NULL_RTX, mode, unsignedp,
2964 label_rtx (node->code_label),
2965 probability);
2967 emit_case_nodes (index, node->left, default_label, default_prob, index_type);
2970 else
2972 /* Node has no children so we check low and high bounds to remove
2973 redundant tests. Only one of the bounds can exist,
2974 since otherwise this node is bounded--a case tested already. */
2975 int high_bound = node_has_high_bound (node, index_type);
2976 int low_bound = node_has_low_bound (node, index_type);
2978 if (!high_bound && low_bound)
2980 probability = conditional_probability (
2981 default_prob,
2982 subtree_prob + default_prob);
2983 emit_cmp_and_jump_insns (index,
2984 convert_modes
2985 (mode, imode,
2986 expand_normal (node->high),
2987 unsignedp),
2988 GT, NULL_RTX, mode, unsignedp,
2989 default_label,
2990 probability);
2993 else if (!low_bound && high_bound)
2995 probability = conditional_probability (
2996 default_prob,
2997 subtree_prob + default_prob);
2998 emit_cmp_and_jump_insns (index,
2999 convert_modes
3000 (mode, imode,
3001 expand_normal (node->low),
3002 unsignedp),
3003 LT, NULL_RTX, mode, unsignedp,
3004 default_label,
3005 probability);
3007 else if (!low_bound && !high_bound)
3009 /* Widen LOW and HIGH to the same width as INDEX. */
3010 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
3011 tree low = build1 (CONVERT_EXPR, type, node->low);
3012 tree high = build1 (CONVERT_EXPR, type, node->high);
3013 rtx low_rtx, new_index, new_bound;
3015 /* Instead of doing two branches, emit one unsigned branch for
3016 (index-low) > (high-low). */
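	    /* Illustrative example (not part of the original source):
	       for the range 10 ... 14, the two tests "index >= 10" and
	       "index <= 14" collapse into the single unsigned test
	       "(unsigned) (index - 10) > 4", which jumps to the default
	       label exactly when the value lies outside the range.  */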
3017 low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
3018 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
3019 NULL_RTX, unsignedp,
3020 OPTAB_WIDEN);
3021 new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
3022 high, low),
3023 NULL_RTX, mode, EXPAND_NORMAL);
3025 probability = conditional_probability (
3026 default_prob,
3027 subtree_prob + default_prob);
3028 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
3029 mode, 1, default_label, probability);
3032 emit_jump (label_rtx (node->code_label));