gcc/stmt.c
1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4 2010, 2011, 2012 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 The functions whose names start with `expand_' are called by the
25 expander to generate RTL instructions for various kinds of constructs. */
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
32 #include "rtl.h"
33 #include "hard-reg-set.h"
34 #include "tree.h"
35 #include "tm_p.h"
36 #include "flags.h"
37 #include "except.h"
38 #include "function.h"
39 #include "insn-config.h"
40 #include "expr.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "machmode.h"
44 #include "diagnostic-core.h"
45 #include "output.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "predict.h"
49 #include "optabs.h"
50 #include "target.h"
51 #include "gimple.h"
52 #include "regs.h"
53 #include "alloc-pool.h"
54 #include "pretty-print.h"
55 #include "bitmap.h"
56 #include "params.h"
59 /* Functions and data structures for expanding case statements. */
61 /* Case label structure, used to hold info on labels within case
62 statements. We handle "range" labels; for a single-value label
63 as in C, the high and low limits are the same.
65 We start with a vector of case nodes sorted in ascending order, and
66 the default label as the last element in the vector. Before expanding
67 to RTL, we transform this vector into a list linked via the RIGHT
68 fields in the case_node struct. Nodes with higher case values are
69 later in the list.
71 Switch statements can be output in three forms. A branch table is
72 used if there are more than a few labels and the labels are dense
73 within the range between the smallest and largest case value. If a
74 branch table is used, no further manipulations are done with the case
75 node chain.
77 The alternative to the use of a branch table is to generate a series
78 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
79 and PARENT fields to hold a binary tree. Initially the tree is
80 totally unbalanced, with everything on the right. We balance the tree
81 with nodes on the left having lower case values than the parent
82 and nodes on the right having higher values. We then output the tree
83 in order.
85 For very small, suitable switch statements, we can generate a series
86 of simple bit test and branches instead. */
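/* A purely illustrative example (not part of the original sources): for a
   C switch such as

       switch (x)
         {
         case 1: case 2: case 3: f (); break;
         case 10:                g (); break;
         default:                h (); break;
         }

   we receive one case node per label, each with LOW == HIGH (1, 2, 3 and
   10 here); adjacent labels that share a target may already have been
   merged into a single range node (LOW = 1, HIGH = 3).  A large, dense
   label set would become a branch table indexed by (x - smallest case
   value); a sparse one becomes a balanced tree of compare-and-jump insns;
   and a small switch whose cases reach only a few distinct targets can
   use the bit tests mentioned above.  */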
88 struct case_node
90 struct case_node *left; /* Left son in binary tree */
91 struct case_node *right; /* Right son in binary tree; also node chain */
92 struct case_node *parent; /* Parent of node in binary tree */
93 tree low; /* Lowest index value for this label */
94 tree high; /* Highest index value for this label */
95 tree code_label; /* Label to jump to when node matches */
98 typedef struct case_node case_node;
99 typedef struct case_node *case_node_ptr;
102 static int n_occurrences (int, const char *);
103 static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
104 static void expand_nl_goto_receiver (void);
105 static bool check_operand_nalternatives (tree, tree);
106 static bool check_unique_operand_names (tree, tree, tree);
107 static char *resolve_operand_name_1 (char *, tree, tree, tree);
108 static void expand_null_return_1 (void);
109 static void expand_value_return (rtx);
110 static bool lshift_cheap_p (void);
111 static int case_bit_test_cmp (const void *, const void *);
112 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
113 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
114 static int node_has_low_bound (case_node_ptr, tree);
115 static int node_has_high_bound (case_node_ptr, tree);
116 static int node_is_bounded (case_node_ptr, tree);
117 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
118 static struct case_node *add_case_node (struct case_node *, tree,
119 tree, tree, tree, alloc_pool);
122 /* Return the rtx-label that corresponds to a LABEL_DECL,
123 creating it if necessary. */
126 label_rtx (tree label)
128 gcc_assert (TREE_CODE (label) == LABEL_DECL);
130 if (!DECL_RTL_SET_P (label))
132 rtx r = gen_label_rtx ();
133 SET_DECL_RTL (label, r);
134 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
135 LABEL_PRESERVE_P (r) = 1;
138 return DECL_RTL (label);
141 /* As above, but also put it on the forced-reference list of the
142 function that contains it. */
144 force_label_rtx (tree label)
146 rtx ref = label_rtx (label);
147 tree function = decl_function_context (label);
149 gcc_assert (function);
151 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref, forced_labels);
152 return ref;
155 /* Add an unconditional jump to LABEL as the next sequential instruction. */
157 void
158 emit_jump (rtx label)
160 do_pending_stack_adjust ();
161 emit_jump_insn (gen_jump (label));
162 emit_barrier ();
165 /* Emit code to jump to the address
166 specified by the pointer expression EXP. */
168 void
169 expand_computed_goto (tree exp)
171 rtx x = expand_normal (exp);
173 x = convert_memory_address (Pmode, x);
175 do_pending_stack_adjust ();
176 emit_indirect_jump (x);
179 /* Handle goto statements and the labels that they can go to. */
181 /* Specify the location in the RTL code of a label LABEL,
182 which is a LABEL_DECL tree node.
184 This is used for the kind of label that the user can jump to with a
185 goto statement, and for alternatives of a switch or case statement.
186 RTL labels generated for loops and conditionals don't go through here;
187 they are generated directly at the RTL level, by other functions below.
189 Note that this has nothing to do with defining label *names*.
190 Languages vary in how they do that and what that even means. */
192 void
193 expand_label (tree label)
195 rtx label_r = label_rtx (label);
197 do_pending_stack_adjust ();
198 emit_label (label_r);
199 if (DECL_NAME (label))
200 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
202 if (DECL_NONLOCAL (label))
204 expand_nl_goto_receiver ();
205 nonlocal_goto_handler_labels
206 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
207 nonlocal_goto_handler_labels);
210 if (FORCED_LABEL (label))
211 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
213 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
214 maybe_set_first_label_num (label_r);
217 /* Generate RTL code for a `goto' statement with target label LABEL.
218 LABEL should be a LABEL_DECL tree node that was or will later be
219 defined with `expand_label'. */
221 void
222 expand_goto (tree label)
224 #ifdef ENABLE_CHECKING
225 /* Check for a nonlocal goto to a containing function. Should have
226 gotten translated to __builtin_nonlocal_goto. */
227 tree context = decl_function_context (label);
228 gcc_assert (!context || context == current_function_decl);
229 #endif
231 emit_jump (label_rtx (label));
234 /* Return the number of times character C occurs in string S. */
235 static int
236 n_occurrences (int c, const char *s)
238 int n = 0;
239 while (*s)
240 n += (*s++ == c);
241 return n;
244 /* Generate RTL for an asm statement (explicit assembler code).
245 STRING is a STRING_CST node containing the assembler code text,
246 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
247 insn is volatile; don't optimize it. */
249 static void
250 expand_asm_loc (tree string, int vol, location_t locus)
252 rtx body;
254 if (TREE_CODE (string) == ADDR_EXPR)
255 string = TREE_OPERAND (string, 0);
257 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
258 ggc_strdup (TREE_STRING_POINTER (string)),
259 locus);
261 MEM_VOLATILE_P (body) = vol;
263 emit_insn (body);
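/* An illustrative note (not part of the original sources): a basic asm
   with no operands, e.g.

       asm volatile ("nop");

   is expanded by the routine above into a single ASM_INPUT rtx holding
   the template string, with MEM_VOLATILE_P set so that the optimizers
   do not delete it.  */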
266 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
267 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
268 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
269 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
270 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
271 constraint allows the use of a register operand. And, *IS_INOUT
272 will be true if the operand is read-write, i.e., if it is used as
273 an input as well as an output. If *CONSTRAINT_P is not in
274 canonical form, it will be made canonical. (Note that `+' will be
275 replaced with `=' as part of this process.)
277 Returns TRUE if all went well; FALSE if an error occurred. */
279 bool
280 parse_output_constraint (const char **constraint_p, int operand_num,
281 int ninputs, int noutputs, bool *allows_mem,
282 bool *allows_reg, bool *is_inout)
284 const char *constraint = *constraint_p;
285 const char *p;
287 /* Assume the constraint doesn't allow the use of either a register
288 or memory. */
289 *allows_mem = false;
290 *allows_reg = false;
292 /* Allow the `=' or `+' to not be at the beginning of the string,
293 since it wasn't explicitly documented that way, and there is a
294 large body of code that puts it last. Swap the character to
295 the front, so as not to uglify any place else. */
296 p = strchr (constraint, '=');
297 if (!p)
298 p = strchr (constraint, '+');
300 /* If the string doesn't contain an `=', issue an error
301 message. */
302 if (!p)
304 error ("output operand constraint lacks %<=%>");
305 return false;
308 /* If the constraint begins with `+', then the operand is both read
309 from and written to. */
310 *is_inout = (*p == '+');
312 /* Canonicalize the output constraint so that it begins with `='. */
313 if (p != constraint || *is_inout)
315 char *buf;
316 size_t c_len = strlen (constraint);
318 if (p != constraint)
319 warning (0, "output constraint %qc for operand %d "
320 "is not at the beginning",
321 *p, operand_num);
323 /* Make a copy of the constraint. */
324 buf = XALLOCAVEC (char, c_len + 1);
325 strcpy (buf, constraint);
326 /* Swap the first character and the `=' or `+'. */
327 buf[p - constraint] = buf[0];
328 /* Make sure the first character is an `='. (Until we do this,
329 it might be a `+'.) */
330 buf[0] = '=';
331 /* Replace the constraint with the canonicalized string. */
332 *constraint_p = ggc_alloc_string (buf, c_len);
333 constraint = *constraint_p;
336 /* Loop through the constraint string. */
337 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
338 switch (*p)
340 case '+':
341 case '=':
342 error ("operand constraint contains incorrectly positioned "
343 "%<+%> or %<=%>");
344 return false;
346 case '%':
347 if (operand_num + 1 == ninputs + noutputs)
349 error ("%<%%%> constraint used with last operand");
350 return false;
352 break;
354 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
355 *allows_mem = true;
356 break;
358 case '?': case '!': case '*': case '&': case '#':
359 case 'E': case 'F': case 'G': case 'H':
360 case 's': case 'i': case 'n':
361 case 'I': case 'J': case 'K': case 'L': case 'M':
362 case 'N': case 'O': case 'P': case ',':
363 break;
365 case '0': case '1': case '2': case '3': case '4':
366 case '5': case '6': case '7': case '8': case '9':
367 case '[':
368 error ("matching constraint not valid in output operand");
369 return false;
371 case '<': case '>':
372 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
373 excepting those that expand_call created. So match memory
374 and hope. */
375 *allows_mem = true;
376 break;
378 case 'g': case 'X':
379 *allows_reg = true;
380 *allows_mem = true;
381 break;
383 case 'p': case 'r':
384 *allows_reg = true;
385 break;
387 default:
388 if (!ISALPHA (*p))
389 break;
390 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
391 *allows_reg = true;
392 #ifdef EXTRA_CONSTRAINT_STR
393 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
394 *allows_reg = true;
395 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
396 *allows_mem = true;
397 else
399 /* Otherwise we can't assume anything about the nature of
400 the constraint except that it isn't purely registers.
401 Treat it like "g" and hope for the best. */
402 *allows_reg = true;
403 *allows_mem = true;
405 #endif
406 break;
409 return true;
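/* An illustrative sketch (not part of the original sources): for an
   output operand written as "+r", as in

       asm ("incl %0" : "+r" (x));

   a call along the lines of

       bool allows_mem, allows_reg, is_inout;
       const char *c = "+r";
       parse_output_constraint (&c, 0, 0, 1,
                                &allows_mem, &allows_reg, &is_inout);

   canonicalizes the string to "=r" and leaves ALLOWS_REG and IS_INOUT
   true and ALLOWS_MEM false; expand_asm_operands below then adds a
   matching input for the read half of the operand.  */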
412 /* Similar, but for input constraints. */
414 bool
415 parse_input_constraint (const char **constraint_p, int input_num,
416 int ninputs, int noutputs, int ninout,
417 const char * const * constraints,
418 bool *allows_mem, bool *allows_reg)
420 const char *constraint = *constraint_p;
421 const char *orig_constraint = constraint;
422 size_t c_len = strlen (constraint);
423 size_t j;
424 bool saw_match = false;
426 /* Assume the constraint doesn't allow the use of either
427 a register or memory. */
428 *allows_mem = false;
429 *allows_reg = false;
431 /* Make sure constraint has neither `=', `+', nor '&'. */
433 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
434 switch (constraint[j])
436 case '+': case '=': case '&':
437 if (constraint == orig_constraint)
439 error ("input operand constraint contains %qc", constraint[j]);
440 return false;
442 break;
444 case '%':
445 if (constraint == orig_constraint
446 && input_num + 1 == ninputs - ninout)
448 error ("%<%%%> constraint used with last operand");
449 return false;
451 break;
453 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
454 *allows_mem = true;
455 break;
457 case '<': case '>':
458 case '?': case '!': case '*': case '#':
459 case 'E': case 'F': case 'G': case 'H':
460 case 's': case 'i': case 'n':
461 case 'I': case 'J': case 'K': case 'L': case 'M':
462 case 'N': case 'O': case 'P': case ',':
463 break;
465 /* Whether or not a numeric constraint allows a register is
466 decided by the matching constraint, and so there is no need
467 to do anything special with them. We must handle them in
468 the default case, so that we don't unnecessarily force
469 operands to memory. */
470 case '0': case '1': case '2': case '3': case '4':
471 case '5': case '6': case '7': case '8': case '9':
473 char *end;
474 unsigned long match;
476 saw_match = true;
478 match = strtoul (constraint + j, &end, 10);
479 if (match >= (unsigned long) noutputs)
481 error ("matching constraint references invalid operand number");
482 return false;
485 /* Try and find the real constraint for this dup. Only do this
486 if the matching constraint is the only alternative. */
487 if (*end == '\0'
488 && (j == 0 || (j == 1 && constraint[0] == '%')))
490 constraint = constraints[match];
491 *constraint_p = constraint;
492 c_len = strlen (constraint);
493 j = 0;
494 /* ??? At the end of the loop, we will skip the first part of
495 the matched constraint. This assumes not only that the
496 other constraint is an output constraint, but also that
497 the '=' or '+' come first. */
498 break;
500 else
501 j = end - constraint;
502 /* Anticipate increment at end of loop. */
503 j--;
505 /* Fall through. */
507 case 'p': case 'r':
508 *allows_reg = true;
509 break;
511 case 'g': case 'X':
512 *allows_reg = true;
513 *allows_mem = true;
514 break;
516 default:
517 if (! ISALPHA (constraint[j]))
519 error ("invalid punctuation %qc in constraint", constraint[j]);
520 return false;
522 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
523 != NO_REGS)
524 *allows_reg = true;
525 #ifdef EXTRA_CONSTRAINT_STR
526 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
527 *allows_reg = true;
528 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
529 *allows_mem = true;
530 else
532 /* Otherwise we can't assume anything about the nature of
533 the constraint except that it isn't purely registers.
534 Treat it like "g" and hope for the best. */
535 *allows_reg = true;
536 *allows_mem = true;
538 #endif
539 break;
542 if (saw_match && !*allows_reg)
543 warning (0, "matching constraint does not allow a register");
545 return true;
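/* An illustrative sketch (not part of the original sources): given

       asm ("mov %1, %0" : "=r" (dst) : "0" (src));

   the input constraint "0" is a matching constraint.  The routine above
   follows it to the constraint of output 0 ("=r" here), so the input is
   known to allow a register and is not needlessly forced into memory.  */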
548 /* Return DECL iff there's an overlap between *REGS and DECL, where DECL
549 can be an asm-declared register. Called via walk_tree. */
551 static tree
552 decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED,
553 void *data)
555 tree decl = *declp;
556 const HARD_REG_SET *const regs = (const HARD_REG_SET *) data;
558 if (TREE_CODE (decl) == VAR_DECL)
560 if (DECL_HARD_REGISTER (decl)
561 && REG_P (DECL_RTL (decl))
562 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
564 rtx reg = DECL_RTL (decl);
566 if (overlaps_hard_reg_set_p (*regs, GET_MODE (reg), REGNO (reg)))
567 return decl;
569 walk_subtrees = 0;
571 else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
572 walk_subtrees = 0;
573 return NULL_TREE;
576 /* If there is an overlap between *REGS and DECL, return the first overlap
577 found. */
578 tree
579 tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
581 return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
584 /* Check for overlap between registers marked in CLOBBERED_REGS and
585 anything inappropriate in T. If an overlap is found, emit an error
586 and return true; return false if everything is OK. */
588 static bool
589 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
591 /* Conflicts between asm-declared register variables and the clobber
592 list are not allowed. */
593 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
595 if (overlap)
597 error ("asm-specifier for variable %qE conflicts with asm clobber list",
598 DECL_NAME (overlap));
600 /* Reset registerness so that multiple errors are not emitted for a
601 single variable. */
602 DECL_REGISTER (overlap) = 0;
603 return true;
606 return false;
609 /* Generate RTL for an asm statement with arguments.
610 STRING is the instruction template.
611 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
612 Each output or input has an expression in the TREE_VALUE and
613 a tree list in TREE_PURPOSE which in turn contains a constraint
614 name in TREE_PURPOSE (or NULL_TREE) and a constraint string
615 in TREE_VALUE.
616 CLOBBERS is a list of STRING_CST nodes each naming a hard register
617 that is clobbered by this insn.
619 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
620 Some elements of OUTPUTS may be replaced with trees representing temporary
621 values. The caller should copy those temporary values to the originally
622 specified lvalues.
624 VOL nonzero means the insn is volatile; don't optimize it. */
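/* A hypothetical example of the structure just described (not part of
   the original sources): for

       asm volatile ("mov %1, %0" : "=r" (dst) : "r" (src) : "cc");

   OUTPUTS is a one-element TREE_LIST whose TREE_VALUE is the lvalue DST
   and whose TREE_PURPOSE is an inner TREE_LIST with a NULL_TREE operand
   name in its TREE_PURPOSE and the STRING_CST "=r" in its TREE_VALUE;
   INPUTS has the same shape for SRC and "r"; CLOBBERS is a TREE_LIST
   whose single TREE_VALUE is the STRING_CST "cc".  */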
626 static void
627 expand_asm_operands (tree string, tree outputs, tree inputs,
628 tree clobbers, tree labels, int vol, location_t locus)
630 rtvec argvec, constraintvec, labelvec;
631 rtx body;
632 int ninputs = list_length (inputs);
633 int noutputs = list_length (outputs);
634 int nlabels = list_length (labels);
635 int ninout;
636 int nclobbers;
637 HARD_REG_SET clobbered_regs;
638 int clobber_conflict_found = 0;
639 tree tail;
640 tree t;
641 int i;
642 /* Vector of RTX's of evaluated output operands. */
643 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
644 int *inout_opnum = XALLOCAVEC (int, noutputs);
645 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
646 enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
647 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
648 int old_generating_concat_p = generating_concat_p;
650 /* An ASM with no outputs needs to be treated as volatile, for now. */
651 if (noutputs == 0)
652 vol = 1;
654 if (! check_operand_nalternatives (outputs, inputs))
655 return;
657 string = resolve_asm_operand_names (string, outputs, inputs, labels);
659 /* Collect constraints. */
660 i = 0;
661 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
662 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
663 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
664 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
666 /* Sometimes we wish to automatically clobber registers across an asm.
667 Case in point is when the i386 backend moved from cc0 to a hard reg --
668 maintaining source-level compatibility means automatically clobbering
669 the flags register. */
670 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
672 /* Count the number of meaningful clobbered registers, ignoring what
673 we would ignore later. */
674 nclobbers = 0;
675 CLEAR_HARD_REG_SET (clobbered_regs);
676 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
678 const char *regname;
679 int nregs;
681 if (TREE_VALUE (tail) == error_mark_node)
682 return;
683 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
685 i = decode_reg_name_and_count (regname, &nregs);
686 if (i == -4)
687 ++nclobbers;
688 else if (i == -2)
689 error ("unknown register name %qs in %<asm%>", regname);
691 /* Mark clobbered registers. */
692 if (i >= 0)
694 int reg;
696 for (reg = i; reg < i + nregs; reg++)
698 ++nclobbers;
700 /* Clobbering the PIC register is an error. */
701 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
703 error ("PIC register clobbered by %qs in %<asm%>", regname);
704 return;
707 SET_HARD_REG_BIT (clobbered_regs, reg);
712 /* First pass over inputs and outputs checks validity and sets
713 mark_addressable if needed. */
715 ninout = 0;
716 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
718 tree val = TREE_VALUE (tail);
719 tree type = TREE_TYPE (val);
720 const char *constraint;
721 bool is_inout;
722 bool allows_reg;
723 bool allows_mem;
725 /* If there's an erroneous arg, emit no insn. */
726 if (type == error_mark_node)
727 return;
729 /* Try to parse the output constraint. If that fails, there's
730 no point in going further. */
731 constraint = constraints[i];
732 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
733 &allows_mem, &allows_reg, &is_inout))
734 return;
736 if (! allows_reg
737 && (allows_mem
738 || is_inout
739 || (DECL_P (val)
740 && REG_P (DECL_RTL (val))
741 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
742 mark_addressable (val);
744 if (is_inout)
745 ninout++;
748 ninputs += ninout;
749 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
751 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
752 return;
755 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
757 bool allows_reg, allows_mem;
758 const char *constraint;
760 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
761 would get VOIDmode and that could cause a crash in reload. */
762 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
763 return;
765 constraint = constraints[i + noutputs];
766 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
767 constraints, &allows_mem, &allows_reg))
768 return;
770 if (! allows_reg && allows_mem)
771 mark_addressable (TREE_VALUE (tail));
774 /* Second pass evaluates arguments. */
776 /* Make sure stack is consistent for asm goto. */
777 if (nlabels > 0)
778 do_pending_stack_adjust ();
780 ninout = 0;
781 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
783 tree val = TREE_VALUE (tail);
784 tree type = TREE_TYPE (val);
785 bool is_inout;
786 bool allows_reg;
787 bool allows_mem;
788 rtx op;
789 bool ok;
791 ok = parse_output_constraint (&constraints[i], i, ninputs,
792 noutputs, &allows_mem, &allows_reg,
793 &is_inout);
794 gcc_assert (ok);
796 /* If an output operand is not a decl or indirect ref and our constraint
797 allows a register, make a temporary to act as an intermediate.
798 Make the asm insn write into that, then our caller will copy it to
799 the real output operand. Likewise for promoted variables. */
801 generating_concat_p = 0;
803 real_output_rtx[i] = NULL_RTX;
804 if ((TREE_CODE (val) == INDIRECT_REF
805 && allows_mem)
806 || (DECL_P (val)
807 && (allows_mem || REG_P (DECL_RTL (val)))
808 && ! (REG_P (DECL_RTL (val))
809 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
810 || ! allows_reg
811 || is_inout)
813 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
814 if (MEM_P (op))
815 op = validize_mem (op);
817 if (! allows_reg && !MEM_P (op))
818 error ("output number %d not directly addressable", i);
819 if ((! allows_mem && MEM_P (op))
820 || GET_CODE (op) == CONCAT)
822 real_output_rtx[i] = op;
823 op = gen_reg_rtx (GET_MODE (op));
824 if (is_inout)
825 emit_move_insn (op, real_output_rtx[i]);
828 else
830 op = assign_temp (type, 0, 0, 1);
831 op = validize_mem (op);
832 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
833 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
834 TREE_VALUE (tail) = make_tree (type, op);
836 output_rtx[i] = op;
838 generating_concat_p = old_generating_concat_p;
840 if (is_inout)
842 inout_mode[ninout] = TYPE_MODE (type);
843 inout_opnum[ninout++] = i;
846 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
847 clobber_conflict_found = 1;
850 /* Make vectors for the expression-rtx, constraint strings,
851 and named operands. */
853 argvec = rtvec_alloc (ninputs);
854 constraintvec = rtvec_alloc (ninputs);
855 labelvec = rtvec_alloc (nlabels);
857 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
858 : GET_MODE (output_rtx[0])),
859 ggc_strdup (TREE_STRING_POINTER (string)),
860 empty_string, 0, argvec, constraintvec,
861 labelvec, locus);
863 MEM_VOLATILE_P (body) = vol;
865 /* Eval the inputs and put them into ARGVEC.
866 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
868 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
870 bool allows_reg, allows_mem;
871 const char *constraint;
872 tree val, type;
873 rtx op;
874 bool ok;
876 constraint = constraints[i + noutputs];
877 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
878 constraints, &allows_mem, &allows_reg);
879 gcc_assert (ok);
881 generating_concat_p = 0;
883 val = TREE_VALUE (tail);
884 type = TREE_TYPE (val);
885 /* EXPAND_INITIALIZER will not generate code for valid initializer
886 constants, but will still generate code for other types of operand.
887 This is the behavior we want for constant constraints. */
888 op = expand_expr (val, NULL_RTX, VOIDmode,
889 allows_reg ? EXPAND_NORMAL
890 : allows_mem ? EXPAND_MEMORY
891 : EXPAND_INITIALIZER);
893 /* Never pass a CONCAT to an ASM. */
894 if (GET_CODE (op) == CONCAT)
895 op = force_reg (GET_MODE (op), op);
896 else if (MEM_P (op))
897 op = validize_mem (op);
899 if (asm_operand_ok (op, constraint, NULL) <= 0)
901 if (allows_reg && TYPE_MODE (type) != BLKmode)
902 op = force_reg (TYPE_MODE (type), op);
903 else if (!allows_mem)
904 warning (0, "asm operand %d probably doesn%'t match constraints",
905 i + noutputs);
906 else if (MEM_P (op))
908 /* We won't recognize either volatile memory or memory
909 with a queued address as an available memory_operand
910 at this point. Ignore it: clearly this *is* a memory. */
912 else
914 warning (0, "use of memory input without lvalue in "
915 "asm operand %d is deprecated", i + noutputs);
917 if (CONSTANT_P (op))
919 rtx mem = force_const_mem (TYPE_MODE (type), op);
920 if (mem)
921 op = validize_mem (mem);
922 else
923 op = force_reg (TYPE_MODE (type), op);
925 if (REG_P (op)
926 || GET_CODE (op) == SUBREG
927 || GET_CODE (op) == CONCAT)
929 tree qual_type = build_qualified_type (type,
930 (TYPE_QUALS (type)
931 | TYPE_QUAL_CONST));
932 rtx memloc = assign_temp (qual_type, 1, 1, 1);
933 memloc = validize_mem (memloc);
934 emit_move_insn (memloc, op);
935 op = memloc;
940 generating_concat_p = old_generating_concat_p;
941 ASM_OPERANDS_INPUT (body, i) = op;
943 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
944 = gen_rtx_ASM_INPUT (TYPE_MODE (type),
945 ggc_strdup (constraints[i + noutputs]));
947 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
948 clobber_conflict_found = 1;
951 /* Protect all the operands from the queue now that they have all been
952 evaluated. */
954 generating_concat_p = 0;
956 /* For in-out operands, copy output rtx to input rtx. */
957 for (i = 0; i < ninout; i++)
959 int j = inout_opnum[i];
960 char buffer[16];
962 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
963 = output_rtx[j];
965 sprintf (buffer, "%d", j);
966 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
967 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
970 /* Copy labels to the vector. */
971 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
972 ASM_OPERANDS_LABEL (body, i)
973 = gen_rtx_LABEL_REF (Pmode, label_rtx (TREE_VALUE (tail)));
975 generating_concat_p = old_generating_concat_p;
977 /* Now, for each output, construct an rtx
978 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
979 ARGVEC CONSTRAINTS OPNAMES))
980 If there is more than one, put them inside a PARALLEL. */
982 if (nlabels > 0 && nclobbers == 0)
984 gcc_assert (noutputs == 0);
985 emit_jump_insn (body);
987 else if (noutputs == 0 && nclobbers == 0)
989 /* No output operands: put in a raw ASM_OPERANDS rtx. */
990 emit_insn (body);
992 else if (noutputs == 1 && nclobbers == 0)
994 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
995 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
997 else
999 rtx obody = body;
1000 int num = noutputs;
1002 if (num == 0)
1003 num = 1;
1005 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1007 /* For each output operand, store a SET. */
1008 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1010 XVECEXP (body, 0, i)
1011 = gen_rtx_SET (VOIDmode,
1012 output_rtx[i],
1013 gen_rtx_ASM_OPERANDS
1014 (GET_MODE (output_rtx[i]),
1015 ggc_strdup (TREE_STRING_POINTER (string)),
1016 ggc_strdup (constraints[i]),
1017 i, argvec, constraintvec, labelvec, locus));
1019 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1022 /* If there are no outputs (but there are some clobbers)
1023 store the bare ASM_OPERANDS into the PARALLEL. */
1025 if (i == 0)
1026 XVECEXP (body, 0, i++) = obody;
1028 /* Store (clobber REG) for each clobbered register specified. */
1030 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1032 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1033 int reg, nregs;
1034 int j = decode_reg_name_and_count (regname, &nregs);
1035 rtx clobbered_reg;
1037 if (j < 0)
1039 if (j == -3) /* `cc', which is not a register */
1040 continue;
1042 if (j == -4) /* `memory', don't cache memory across asm */
1044 XVECEXP (body, 0, i++)
1045 = gen_rtx_CLOBBER (VOIDmode,
1046 gen_rtx_MEM
1047 (BLKmode,
1048 gen_rtx_SCRATCH (VOIDmode)));
1049 continue;
1052 /* Ignore unknown register, error already signaled. */
1053 continue;
1056 for (reg = j; reg < j + nregs; reg++)
1058 /* Use QImode since that's guaranteed to clobber just
1059 * one reg. */
1060 clobbered_reg = gen_rtx_REG (QImode, reg);
1062 /* Do sanity check for overlap between clobbers and
1063 respectively input and outputs that hasn't been
1064 handled. Such overlap should have been detected and
1065 reported above. */
1066 if (!clobber_conflict_found)
1068 int opno;
1070 /* We test the old body (obody) contents to avoid
1071 tripping over the under-construction body. */
1072 for (opno = 0; opno < noutputs; opno++)
1073 if (reg_overlap_mentioned_p (clobbered_reg,
1074 output_rtx[opno]))
1075 internal_error
1076 ("asm clobber conflict with output operand");
1078 for (opno = 0; opno < ninputs - ninout; opno++)
1079 if (reg_overlap_mentioned_p (clobbered_reg,
1080 ASM_OPERANDS_INPUT (obody,
1081 opno)))
1082 internal_error
1083 ("asm clobber conflict with input operand");
1086 XVECEXP (body, 0, i++)
1087 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1091 if (nlabels > 0)
1092 emit_jump_insn (body);
1093 else
1094 emit_insn (body);
1097 /* For any outputs that needed reloading into registers, spill them
1098 back to where they belong. */
1099 for (i = 0; i < noutputs; ++i)
1100 if (real_output_rtx[i])
1101 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1103 crtl->has_asm_statement = 1;
1104 free_temp_slots ();
1107 void
1108 expand_asm_stmt (gimple stmt)
1110 int noutputs;
1111 tree outputs, tail, t;
1112 tree *o;
1113 size_t i, n;
1114 const char *s;
1115 tree str, out, in, cl, labels;
1116 location_t locus = gimple_location (stmt);
1118 /* Meh... convert the gimple asm operands into real tree lists.
1119 Eventually we should make all routines work on the vectors instead
1120 of relying on TREE_CHAIN. */
1121 out = NULL_TREE;
1122 n = gimple_asm_noutputs (stmt);
1123 if (n > 0)
1125 t = out = gimple_asm_output_op (stmt, 0);
1126 for (i = 1; i < n; i++)
1127 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
1130 in = NULL_TREE;
1131 n = gimple_asm_ninputs (stmt);
1132 if (n > 0)
1134 t = in = gimple_asm_input_op (stmt, 0);
1135 for (i = 1; i < n; i++)
1136 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
1139 cl = NULL_TREE;
1140 n = gimple_asm_nclobbers (stmt);
1141 if (n > 0)
1143 t = cl = gimple_asm_clobber_op (stmt, 0);
1144 for (i = 1; i < n; i++)
1145 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
1148 labels = NULL_TREE;
1149 n = gimple_asm_nlabels (stmt);
1150 if (n > 0)
1152 t = labels = gimple_asm_label_op (stmt, 0);
1153 for (i = 1; i < n; i++)
1154 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
1157 s = gimple_asm_string (stmt);
1158 str = build_string (strlen (s), s);
1160 if (gimple_asm_input_p (stmt))
1162 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
1163 return;
1166 outputs = out;
1167 noutputs = gimple_asm_noutputs (stmt);
1168 /* o[I] is the place that output number I should be written. */
1169 o = (tree *) alloca (noutputs * sizeof (tree));
1171 /* Record the contents of OUTPUTS before it is modified. */
1172 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1173 o[i] = TREE_VALUE (tail);
1175 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1176 OUTPUTS some trees for where the values were actually stored. */
1177 expand_asm_operands (str, outputs, in, cl, labels,
1178 gimple_asm_volatile_p (stmt), locus);
1180 /* Copy all the intermediate outputs into the specified outputs. */
1181 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1183 if (o[i] != TREE_VALUE (tail))
1185 expand_assignment (o[i], TREE_VALUE (tail), false);
1186 free_temp_slots ();
1188 /* Restore the original value so that it's correct the next
1189 time we expand this function. */
1190 TREE_VALUE (tail) = o[i];
1195 /* A subroutine of expand_asm_operands. Check that all operands have
1196 the same number of alternatives. Return true if so. */
1198 static bool
1199 check_operand_nalternatives (tree outputs, tree inputs)
1201 if (outputs || inputs)
1203 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1204 int nalternatives
1205 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1206 tree next = inputs;
1208 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1210 error ("too many alternatives in %<asm%>");
1211 return false;
1214 tmp = outputs;
1215 while (tmp)
1217 const char *constraint
1218 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1220 if (n_occurrences (',', constraint) != nalternatives)
1222 error ("operand constraints for %<asm%> differ "
1223 "in number of alternatives");
1224 return false;
1227 if (TREE_CHAIN (tmp))
1228 tmp = TREE_CHAIN (tmp);
1229 else
1230 tmp = next, next = 0;
1234 return true;
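/* An illustrative note (not part of the original sources): the check
   above rejects, for example,

       asm ("..." : "=r,m" (x) : "ri" (y));

   because the output offers two alternatives ("r" or "m") while the
   input offers only one; "=r,m" paired with "ri,g" would be accepted.  */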
1237 /* A subroutine of expand_asm_operands. Check that all operand names
1238 are unique. Return true if so. The names are STRING_CST nodes, so
1239 we compare their contents with simple_cst_equal rather than relying
1240 on pointer equality. */
1242 static bool
1243 check_unique_operand_names (tree outputs, tree inputs, tree labels)
1245 tree i, j, i_name = NULL_TREE;
1247 for (i = outputs; i ; i = TREE_CHAIN (i))
1249 i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1250 if (! i_name)
1251 continue;
1253 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1254 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1255 goto failure;
1258 for (i = inputs; i ; i = TREE_CHAIN (i))
1260 i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1261 if (! i_name)
1262 continue;
1264 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1265 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1266 goto failure;
1267 for (j = outputs; j ; j = TREE_CHAIN (j))
1268 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1269 goto failure;
1272 for (i = labels; i ; i = TREE_CHAIN (i))
1274 i_name = TREE_PURPOSE (i);
1275 if (! i_name)
1276 continue;
1278 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1279 if (simple_cst_equal (i_name, TREE_PURPOSE (j)))
1280 goto failure;
1281 for (j = inputs; j ; j = TREE_CHAIN (j))
1282 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1283 goto failure;
1286 return true;
1288 failure:
1289 error ("duplicate asm operand name %qs", TREE_STRING_POINTER (i_name));
1290 return false;
1293 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1294 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1295 STRING and in the constraints to those numbers. */
1297 tree
1298 resolve_asm_operand_names (tree string, tree outputs, tree inputs, tree labels)
1300 char *buffer;
1301 char *p;
1302 const char *c;
1303 tree t;
1305 check_unique_operand_names (outputs, inputs, labels);
1307 /* Substitute [<name>] in input constraint strings. There should be no
1308 named operands in output constraints. */
1309 for (t = inputs; t ; t = TREE_CHAIN (t))
1311 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1312 if (strchr (c, '[') != NULL)
1314 p = buffer = xstrdup (c);
1315 while ((p = strchr (p, '[')) != NULL)
1316 p = resolve_operand_name_1 (p, outputs, inputs, NULL);
1317 TREE_VALUE (TREE_PURPOSE (t))
1318 = build_string (strlen (buffer), buffer);
1319 free (buffer);
1323 /* Now check for any needed substitutions in the template. */
1324 c = TREE_STRING_POINTER (string);
1325 while ((c = strchr (c, '%')) != NULL)
1327 if (c[1] == '[')
1328 break;
1329 else if (ISALPHA (c[1]) && c[2] == '[')
1330 break;
1331 else
1333 c += 1 + (c[1] == '%');
1334 continue;
1338 if (c)
1340 /* OK, we need to make a copy so we can perform the substitutions.
1341 Assume that we will not need extra space--we get to remove '['
1342 and ']', which means we cannot have a problem until we have more
1343 than 999 operands. */
1344 buffer = xstrdup (TREE_STRING_POINTER (string));
1345 p = buffer + (c - TREE_STRING_POINTER (string));
1347 while ((p = strchr (p, '%')) != NULL)
1349 if (p[1] == '[')
1350 p += 1;
1351 else if (ISALPHA (p[1]) && p[2] == '[')
1352 p += 2;
1353 else
1355 p += 1 + (p[1] == '%');
1356 continue;
1359 p = resolve_operand_name_1 (p, outputs, inputs, labels);
1362 string = build_string (strlen (buffer), buffer);
1363 free (buffer);
1366 return string;
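/* An illustrative sketch (not part of the original sources): for named
   operands such as

       asm ("add %[inc], %[val]" : [val] "+r" (v) : [inc] "r" (i));

   the code above rewrites the template to "add %1, %0", since [val] is
   output operand 0 and [inc] is input operand 1 (inputs are numbered
   after all outputs); any [name] references inside the input constraint
   strings are rewritten the same way.  */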
1369 /* A subroutine of resolve_operand_names. P points to the '[' for a
1370 potential named operand of the form [<name>]. In place, replace
1371 the name and brackets with a number. Return a pointer to the
1372 balance of the string after substitution. */
1374 static char *
1375 resolve_operand_name_1 (char *p, tree outputs, tree inputs, tree labels)
1377 char *q;
1378 int op;
1379 tree t;
1381 /* Collect the operand name. */
1382 q = strchr (++p, ']');
1383 if (!q)
1385 error ("missing close brace for named operand");
1386 return strchr (p, '\0');
1388 *q = '\0';
1390 /* Resolve the name to a number. */
1391 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1393 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1394 if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1395 goto found;
1397 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1399 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1400 if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1401 goto found;
1403 for (t = labels; t ; t = TREE_CHAIN (t), op++)
1405 tree name = TREE_PURPOSE (t);
1406 if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1407 goto found;
1410 error ("undefined named operand %qs", identifier_to_locale (p));
1411 op = 0;
1413 found:
1414 /* Replace the name with the number. Unfortunately, not all libraries
1415 get the return value of sprintf correct, so search for the end of the
1416 generated string by hand. */
1417 sprintf (--p, "%d", op);
1418 p = strchr (p, '\0');
1420 /* Verify the no extra buffer space assumption. */
1421 gcc_assert (p <= q);
1423 /* Shift the rest of the buffer down to fill the gap. */
1424 memmove (p, q + 1, strlen (q + 1) + 1);
1426 return p;
1429 /* Generate RTL to evaluate the expression EXP. */
1431 void
1432 expand_expr_stmt (tree exp)
1434 rtx value;
1435 tree type;
1437 value = expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
1438 type = TREE_TYPE (exp);
1440 /* If all we do is reference a volatile value in memory,
1441 copy it to a register to be sure it is actually touched. */
1442 if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
1444 if (TYPE_MODE (type) == VOIDmode)
1446 else if (TYPE_MODE (type) != BLKmode)
1447 copy_to_reg (value);
1448 else
1450 rtx lab = gen_label_rtx ();
1452 /* Compare the value with itself to reference it. */
1453 emit_cmp_and_jump_insns (value, value, EQ,
1454 expand_normal (TYPE_SIZE (type)),
1455 BLKmode, 0, lab);
1456 emit_label (lab);
1460 /* Free any temporaries used to evaluate this expression. */
1461 free_temp_slots ();
1465 /* Generate RTL to return from the current function, with no value.
1466 (That is, we do not do anything about returning any value.) */
1468 void
1469 expand_null_return (void)
1471 /* If this function was declared to return a value, but we
1472 didn't, clobber the return registers so that they are not
1473 propagated live to the rest of the function. */
1474 clobber_return_register ();
1476 expand_null_return_1 ();
1479 /* Generate RTL to return directly from the current function.
1480 (That is, we bypass any return value.) */
1482 void
1483 expand_naked_return (void)
1485 rtx end_label;
1487 clear_pending_stack_adjust ();
1488 do_pending_stack_adjust ();
1490 end_label = naked_return_label;
1491 if (end_label == 0)
1492 end_label = naked_return_label = gen_label_rtx ();
1494 emit_jump (end_label);
1497 /* Generate RTL to return from the current function, with value VAL. */
1499 static void
1500 expand_value_return (rtx val)
1502 /* Copy the value to the return location unless it's already there. */
1504 tree decl = DECL_RESULT (current_function_decl);
1505 rtx return_reg = DECL_RTL (decl);
1506 if (return_reg != val)
1508 tree funtype = TREE_TYPE (current_function_decl);
1509 tree type = TREE_TYPE (decl);
1510 int unsignedp = TYPE_UNSIGNED (type);
1511 enum machine_mode old_mode = DECL_MODE (decl);
1512 enum machine_mode mode;
1513 if (DECL_BY_REFERENCE (decl))
1514 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
1515 else
1516 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
1518 if (mode != old_mode)
1519 val = convert_modes (mode, old_mode, val, unsignedp);
1521 if (GET_CODE (return_reg) == PARALLEL)
1522 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1523 else
1524 emit_move_insn (return_reg, val);
1527 expand_null_return_1 ();
1530 /* Output a return with no value. */
1532 static void
1533 expand_null_return_1 (void)
1535 clear_pending_stack_adjust ();
1536 do_pending_stack_adjust ();
1537 emit_jump (return_label);
1540 /* Generate RTL to evaluate the expression RETVAL and return it
1541 from the current function. */
1543 void
1544 expand_return (tree retval)
1546 rtx result_rtl;
1547 rtx val = 0;
1548 tree retval_rhs;
1550 /* If function wants no value, give it none. */
1551 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
1553 expand_normal (retval);
1554 expand_null_return ();
1555 return;
1558 if (retval == error_mark_node)
1560 /* Treat this like a return of no value from a function that
1561 returns a value. */
1562 expand_null_return ();
1563 return;
1565 else if ((TREE_CODE (retval) == MODIFY_EXPR
1566 || TREE_CODE (retval) == INIT_EXPR)
1567 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
1568 retval_rhs = TREE_OPERAND (retval, 1);
1569 else
1570 retval_rhs = retval;
1572 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
1574 /* If we are returning the RESULT_DECL, then the value has already
1575 been stored into it, so we don't have to do anything special. */
1576 if (TREE_CODE (retval_rhs) == RESULT_DECL)
1577 expand_value_return (result_rtl);
1579 /* If the result is an aggregate that is being returned in one (or more)
1580 registers, load the registers here. */
1582 else if (retval_rhs != 0
1583 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
1584 && REG_P (result_rtl))
1586 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
1587 if (val)
1589 /* Use the mode of the result value on the return register. */
1590 PUT_MODE (result_rtl, GET_MODE (val));
1591 expand_value_return (val);
1593 else
1594 expand_null_return ();
1596 else if (retval_rhs != 0
1597 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
1598 && (REG_P (result_rtl)
1599 || (GET_CODE (result_rtl) == PARALLEL)))
1601 /* Calculate the return value into a temporary (usually a pseudo
1602 reg). */
1603 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
1604 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
1606 val = assign_temp (nt, 0, 0, 1);
1607 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
1608 val = force_not_mem (val);
1609 /* Return the calculated value. */
1610 expand_value_return (val);
1612 else
1614 /* No hard reg used; calculate value into hard return reg. */
1615 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
1616 expand_value_return (result_rtl);
1620 /* Emit code to restore vital registers at the beginning of a nonlocal goto
1621 handler. */
1622 static void
1623 expand_nl_goto_receiver (void)
1625 rtx chain;
1627 /* Clobber the FP when we get here, so we have to make sure it's
1628 marked as used by this function. */
1629 emit_use (hard_frame_pointer_rtx);
1631 /* Mark the static chain as clobbered here so life information
1632 doesn't get messed up for it. */
1633 chain = targetm.calls.static_chain (current_function_decl, true);
1634 if (chain && REG_P (chain))
1635 emit_clobber (chain);
1637 #ifdef HAVE_nonlocal_goto
1638 if (! HAVE_nonlocal_goto)
1639 #endif
1640 /* First adjust our frame pointer to its actual value. It was
1641 previously set to the start of the virtual area corresponding to
1642 the stacked variables when we branched here and now needs to be
1643 adjusted to the actual hardware fp value.
1645 Assignments to virtual registers are converted by
1646 instantiate_virtual_regs into the corresponding assignment
1647 to the underlying register (fp in this case) that makes
1648 the original assignment true.
1649 So the following insn will actually be
1650 decrementing fp by STARTING_FRAME_OFFSET. */
1651 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
1653 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
1654 if (fixed_regs[ARG_POINTER_REGNUM])
1656 #ifdef ELIMINABLE_REGS
1657 /* If the argument pointer can be eliminated in favor of the
1658 frame pointer, we don't need to restore it. We assume here
1659 that if such an elimination is present, it can always be used.
1660 This is the case on all known machines; if we don't make this
1661 assumption, we do unnecessary saving on many machines. */
1662 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1663 size_t i;
1665 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1666 if (elim_regs[i].from == ARG_POINTER_REGNUM
1667 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1668 break;
1670 if (i == ARRAY_SIZE (elim_regs))
1671 #endif
1673 /* Now restore our arg pointer from the address at which it
1674 was saved in our stack frame. */
1675 emit_move_insn (crtl->args.internal_arg_pointer,
1676 copy_to_reg (get_arg_pointer_save_area ()));
1679 #endif
1681 #ifdef HAVE_nonlocal_goto_receiver
1682 if (HAVE_nonlocal_goto_receiver)
1683 emit_insn (gen_nonlocal_goto_receiver ());
1684 #endif
1686 /* We must not allow the code we just generated to be reordered by
1687 scheduling. Specifically, the update of the frame pointer must
1688 happen immediately, not later. */
1689 emit_insn (gen_blockage ());
1692 /* Generate RTL for the automatic variable declaration DECL.
1693 (Other kinds of declarations are simply ignored if seen here.) */
1695 void
1696 expand_decl (tree decl)
1698 tree type;
1700 type = TREE_TYPE (decl);
1702 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
1703 type in case this node is used in a reference. */
1704 if (TREE_CODE (decl) == CONST_DECL)
1706 DECL_MODE (decl) = TYPE_MODE (type);
1707 DECL_ALIGN (decl) = TYPE_ALIGN (type);
1708 DECL_SIZE (decl) = TYPE_SIZE (type);
1709 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
1710 return;
1713 /* Otherwise, only automatic variables need any expansion done. Static and
1714 external variables, and external functions, will be handled by
1715 `assemble_variable' (called from finish_decl). TYPE_DECL requires
1716 nothing. PARM_DECLs are handled in `assign_parms'. */
1717 if (TREE_CODE (decl) != VAR_DECL)
1718 return;
1720 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
1721 return;
1723 /* Create the RTL representation for the variable. */
1725 if (type == error_mark_node)
1726 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
1728 else if (DECL_SIZE (decl) == 0)
1730 /* Variable with incomplete type. */
1731 rtx x;
1732 if (DECL_INITIAL (decl) == 0)
1733 /* Error message was already done; now avoid a crash. */
1734 x = gen_rtx_MEM (BLKmode, const0_rtx);
1735 else
1736 /* An initializer is going to decide the size of this array.
1737 Until we know the size, represent its address with a reg. */
1738 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
1740 set_mem_attributes (x, decl, 1);
1741 SET_DECL_RTL (decl, x);
1743 else if (use_register_for_decl (decl))
1745 /* Automatic variable that can go in a register. */
1746 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1748 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
1750 /* Note if the object is a user variable. */
1751 if (!DECL_ARTIFICIAL (decl))
1752 mark_user_reg (DECL_RTL (decl));
1754 if (POINTER_TYPE_P (type))
1755 mark_reg_pointer (DECL_RTL (decl),
1756 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
1759 else
1761 rtx oldaddr = 0;
1762 rtx addr;
1763 rtx x;
1765 /* Variable-sized decls are dealt with in the gimplifier. */
1766 gcc_assert (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST);
1768 /* If we previously made RTL for this decl, it must be an array
1769 whose size was determined by the initializer.
1770 The old address was a register; set that register now
1771 to the proper address. */
1772 if (DECL_RTL_SET_P (decl))
1774 gcc_assert (MEM_P (DECL_RTL (decl)));
1775 gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0)));
1776 oldaddr = XEXP (DECL_RTL (decl), 0);
1779 /* Set alignment we actually gave this decl. */
1780 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
1781 : GET_MODE_BITSIZE (DECL_MODE (decl)));
1782 DECL_USER_ALIGN (decl) = 0;
1784 x = assign_temp (decl, 1, 1, 1);
1785 set_mem_attributes (x, decl, 1);
1786 SET_DECL_RTL (decl, x);
1788 if (oldaddr)
1790 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
1791 if (addr != oldaddr)
1792 emit_move_insn (oldaddr, addr);
1797 /* Emit code to save the current value of stack. */
1799 expand_stack_save (void)
1801 rtx ret = NULL_RTX;
1803 do_pending_stack_adjust ();
1804 emit_stack_save (SAVE_BLOCK, &ret);
1805 return ret;
1808 /* Emit code to restore the current value of stack. */
1809 void
1810 expand_stack_restore (tree var)
1812 rtx prev, sa = expand_normal (var);
1814 sa = convert_memory_address (Pmode, sa);
1816 prev = get_last_insn ();
1817 emit_stack_restore (SAVE_BLOCK, sa);
1818 fixup_args_size_notes (prev, get_last_insn (), 0);
1821 /* Do the insertion of a case label into case_list. The labels are
1822 fed to us in descending order from the sorted vector of case labels used
1823 in the tree part of the middle end. So the list we construct is
1824 sorted in ascending order. The bounds on the case range, LOW and HIGH,
1825 are converted to case's index type TYPE. Note that the original type
1826 of the case index in the source code is usually "lost" during
1827 gimplification due to type promotion, but the case labels retain the
1828 original type. */
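/* An illustrative note (not part of the original sources): if the sorted
   vector holds the ranges 1-2, 5-5 and 8-9, the labels arrive here as
   8-9, then 5-5, then 1-2; each call pushes a new node onto the head of
   the chain through the RIGHT field, yielding the ascending list
   1-2 -> 5-5 -> 8-9.  */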
1830 static struct case_node *
1831 add_case_node (struct case_node *head, tree type, tree low, tree high,
1832 tree label, alloc_pool case_node_pool)
1834 struct case_node *r;
1836 gcc_checking_assert (low);
1837 gcc_checking_assert (! high || (TREE_TYPE (low) == TREE_TYPE (high)));
1839 /* Add this label to the chain. Make sure to drop overflow flags. */
1840 r = (struct case_node *) pool_alloc (case_node_pool);
1841 r->low = build_int_cst_wide (type, TREE_INT_CST_LOW (low),
1842 TREE_INT_CST_HIGH (low));
1843 r->high = build_int_cst_wide (type, TREE_INT_CST_LOW (high),
1844 TREE_INT_CST_HIGH (high));
1845 r->code_label = label;
1846 r->parent = r->left = NULL;
1847 r->right = head;
1848 return r;
1851 /* Maximum number of case bit tests. */
1852 #define MAX_CASE_BIT_TESTS 3
1854 /* By default, enable case bit tests on targets that can shift in word_mode. */
1855 #ifndef CASE_USE_BIT_TESTS
1856 #define CASE_USE_BIT_TESTS (optab_handler (ashl_optab, word_mode) \
1857 != CODE_FOR_nothing)
1858 #endif
1861 /* A case_bit_test represents a set of case nodes that may be
1862 selected from using a bit-wise comparison. HI and LO hold
1863 the integer to be tested against, LABEL contains the label
1864 to jump to upon success and BITS counts the number of case
1865 nodes handled by this test, typically the number of bits
1866 set in HI:LO. */
1868 struct case_bit_test
1870 HOST_WIDE_INT hi;
1871 HOST_WIDE_INT lo;
1872 rtx label;
1873 int bits;
1876 /* Determine whether "1 << x" is relatively cheap in word_mode. */
1878 static
1879 bool lshift_cheap_p (void)
1881 static bool init[2] = {false, false};
1882 static bool cheap[2] = {true, true};
1884 bool speed_p = optimize_insn_for_speed_p ();
1886 if (!init[speed_p])
1888 rtx reg = gen_rtx_REG (word_mode, 10000);
1889 int cost = set_src_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg),
1890 speed_p);
1891 cheap[speed_p] = cost < COSTS_N_INSNS (3);
1892 init[speed_p] = true;
1895 return cheap[speed_p];
1898 /* Comparison function for qsort to order bit tests by decreasing
1899 number of case nodes, i.e. the node with the most cases gets
1900 tested first. */
1902 static int
1903 case_bit_test_cmp (const void *p1, const void *p2)
1905 const struct case_bit_test *const d1 = (const struct case_bit_test *) p1;
1906 const struct case_bit_test *const d2 = (const struct case_bit_test *) p2;
1908 if (d2->bits != d1->bits)
1909 return d2->bits - d1->bits;
1911 /* Stabilize the sort. */
1912 return CODE_LABEL_NUMBER (d2->label) - CODE_LABEL_NUMBER (d1->label);
1915 /* Expand a switch statement by a short sequence of bit-wise
1916 comparisons. "switch(x)" is effectively converted into
1917 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
1918 integer constants.
1920 INDEX_EXPR is the value being switched on, which is of
1921 type INDEX_TYPE. MINVAL is the lowest case value in
1922 the case nodes, of type INDEX_TYPE, and RANGE is the highest
1923 value minus MINVAL, also of type INDEX_TYPE. NODES is
1924 the set of case nodes, and DEFAULT_LABEL is the label to
1925 branch to should none of the cases match.
1927 There *MUST* be MAX_CASE_BIT_TESTS or fewer unique case
1928 node targets. */
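/* A worked example (not part of the original sources): for

       switch (c)
         {
         case 'a': case 'e': case 'i': case 'o': case 'u': return 1;
         default: return 0;
         }

   MINVAL is 'a', all five cases share one target, and the code emitted
   below is in effect

       if ((unsigned) (c - 'a') > 'u' - 'a')  goto default_label;
       if ((1 << (c - 'a')) & 0x104111)       goto shared_label;
       goto default_label;

   where 0x104111 has bits 0, 4, 8, 14 and 20 set.  */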
1930 static void
1931 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
1932 tree range, case_node_ptr nodes, rtx default_label)
1934 struct case_bit_test test[MAX_CASE_BIT_TESTS];
1935 enum machine_mode mode;
1936 rtx expr, index, label;
1937 unsigned int i,j,lo,hi;
1938 struct case_node *n;
1939 unsigned int count;
1941 count = 0;
1942 for (n = nodes; n; n = n->right)
1944 label = label_rtx (n->code_label);
1945 for (i = 0; i < count; i++)
1946 if (label == test[i].label)
1947 break;
1949 if (i == count)
1951 gcc_assert (count < MAX_CASE_BIT_TESTS);
1952 test[i].hi = 0;
1953 test[i].lo = 0;
1954 test[i].label = label;
1955 test[i].bits = 1;
1956 count++;
1958 else
1959 test[i].bits++;
1961 lo = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
1962 n->low, minval), 1);
1963 hi = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
1964 n->high, minval), 1);
1965 for (j = lo; j <= hi; j++)
1966 if (j >= HOST_BITS_PER_WIDE_INT)
1967 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
1968 else
1969 test[i].lo |= (HOST_WIDE_INT) 1 << j;
1972 qsort (test, count, sizeof(*test), case_bit_test_cmp);
1974 index_expr = fold_build2 (MINUS_EXPR, index_type,
1975 fold_convert (index_type, index_expr),
1976 fold_convert (index_type, minval));
1977 index = expand_normal (index_expr);
1978 do_pending_stack_adjust ();
1980 mode = TYPE_MODE (index_type);
1981 expr = expand_normal (range);
1982 if (default_label)
1983 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
1984 default_label);
1986 index = convert_to_mode (word_mode, index, 0);
1987 index = expand_binop (word_mode, ashl_optab, const1_rtx,
1988 index, NULL_RTX, 1, OPTAB_WIDEN);
1990 for (i = 0; i < count; i++)
1992 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
1993 expr = expand_binop (word_mode, and_optab, index, expr,
1994 NULL_RTX, 1, OPTAB_WIDEN);
1995 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
1996 word_mode, 1, test[i].label);
1999 if (default_label)
2000 emit_jump (default_label);
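/* A hypothetical example of the transformation performed above; the
   source-level names are invented purely for illustration:

       switch (x)
         {
         case 1: case 4: case 6:  foo (); break;
         case 2: case 3:          bar (); break;
         default:                 baz (); break;
         }

   has MINVAL 1, RANGE 5, five case labels and two unique targets, and
   is expanded roughly as if it had been written

       unsigned int tmp = x - 1;
       if (tmp > 5)
         goto default_lab;
       tmp = 1 << tmp;
       if (tmp & 0x29)  goto foo_lab;      0x29 = bits 0, 3, 5 (cases 1, 4, 6)
       if (tmp & 0x06)  goto bar_lab;      0x06 = bits 1, 2    (cases 2, 3)
       goto default_lab;

   with the mask covering the most cases tested first, as arranged by the
   qsort above.  */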
2003 #ifndef HAVE_casesi
2004 #define HAVE_casesi 0
2005 #endif
2007 #ifndef HAVE_tablejump
2008 #define HAVE_tablejump 0
2009 #endif
2011 /* Return true if a switch should be expanded as a bit test.
2012 INDEX_EXPR is the index expression, RANGE is the difference between
2013 highest and lowest case, UNIQ is the number of unique case node
2014 targets not counting the default case, and COUNT is the number of comparisons
2015 needed, not counting the default case. */
2016 bool
2017 expand_switch_using_bit_tests_p (tree index_expr, tree range,
2018 unsigned int uniq, unsigned int count)
2020 return (CASE_USE_BIT_TESTS
2021 && ! TREE_CONSTANT (index_expr)
2022 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
2023 && compare_tree_int (range, 0) > 0
2024 && lshift_cheap_p ()
2025 && ((uniq == 1 && count >= 3)
2026 || (uniq == 2 && count >= 5)
2027 || (uniq == 3 && count >= 6)));
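/* A minimal sketch of how this predicate is meant to be used, mirroring
   the call made from expand_case below:

       if (expand_switch_using_bit_tests_p (index_expr, range, uniq, count))
         emit_case_bit_tests (index_type, index_expr, minval, range,
                              case_list, default_label);

   For instance, three case labels that all branch to the same target
   qualify (uniq == 1, count == 3), while three distinct targets need at
   least six comparisons before the bit-test form is considered a win.  */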
2030 /* Return the smallest number of different values for which it is best to use a
2031 jump-table instead of a tree of conditional branches. */
2033 static unsigned int
2034 case_values_threshold (void)
2036 unsigned int threshold = PARAM_VALUE (PARAM_CASE_VALUES_THRESHOLD);
2038 if (threshold == 0)
2039 threshold = targetm.case_values_threshold ();
2041 return threshold;
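/* For example (the generic hook's behaviour may differ per target): with
   the default --param case-values-threshold=0 the decision is delegated
   to targetm.case_values_threshold, which for most targets returns a
   small constant such as 4 or 5, while an explicit
   --param case-values-threshold=8 overrides the hook entirely.  */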
2044 /* Terminate a case (Pascal/Ada) or switch (C) statement, in
2045 which STMT is the GIMPLE_SWITCH whose index expression is
2046 the value to be tested. Generate the code to test the
2047 index against the case values and to jump to the right
2048 place. */
2050 void
2051 expand_case (gimple stmt)
2053 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
2054 rtx default_label = 0;
2055 struct case_node *n;
2056 unsigned int count, uniq;
2057 rtx index;
2058 rtx table_label;
2059 int ncases;
2060 rtx *labelvec;
2061 int i;
2062 rtx before_case, end, lab;
2064 tree index_expr = gimple_switch_index (stmt);
2065 tree index_type = TREE_TYPE (index_expr);
2066 int unsignedp = TYPE_UNSIGNED (index_type);
2068 /* The insn after which the case dispatch should finally
2069 be emitted. Zero for a dummy. */
2070 rtx start;
2072 /* A list of case labels; it is first built as a list and it may then
2073 be rearranged into a nearly balanced binary tree. */
2074 struct case_node *case_list = 0;
2076 /* Label to jump to if no case matches. */
2077 tree default_label_decl = NULL_TREE;
2079 alloc_pool case_node_pool = create_alloc_pool ("struct case_node pool",
2080 sizeof (struct case_node),
2081 100);
2083 do_pending_stack_adjust ();
2085 /* An ERROR_MARK occurs for various reasons including invalid data type. */
2086 if (index_type != error_mark_node)
2088 tree elt;
2089 bitmap label_bitmap;
2090 int stopi = 0;
2092 /* cleanup_tree_cfg removes all SWITCH_EXPRs whose index
2093 expressions are INTEGER_CSTs. */
2094 gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);
2096 /* The default case, if ever taken, is the first element. */
2097 elt = gimple_switch_label (stmt, 0);
2098 if (!CASE_LOW (elt) && !CASE_HIGH (elt))
2100 default_label_decl = CASE_LABEL (elt);
2101 stopi = 1;
2104 for (i = gimple_switch_num_labels (stmt) - 1; i >= stopi; --i)
2106 tree low, high;
2107 elt = gimple_switch_label (stmt, i);
2109 low = CASE_LOW (elt);
2110 gcc_assert (low);
2111 high = CASE_HIGH (elt);
2113 /* The canonical form of a case label in GIMPLE is that a simple case
2114 has an empty CASE_HIGH. For the casesi and tablejump expanders,
2115 the back ends want simple cases to have high == low. */
2116 gcc_assert (! high || tree_int_cst_lt (low, high));
2117 if (! high)
2118 high = low;
2120 case_list = add_case_node (case_list, index_type, low, high,
2121 CASE_LABEL (elt), case_node_pool);
2125 before_case = start = get_last_insn ();
2126 if (default_label_decl)
2127 default_label = label_rtx (default_label_decl);
2129 /* Get upper and lower bounds of case values. */
2131 uniq = 0;
2132 count = 0;
2133 label_bitmap = BITMAP_ALLOC (NULL);
2134 for (n = case_list; n; n = n->right)
2136 /* Count the elements and track the largest and smallest
2137 of them (treating them as signed even if they are not). */
2138 if (count++ == 0)
2140 minval = n->low;
2141 maxval = n->high;
2143 else
2145 if (tree_int_cst_lt (n->low, minval))
2146 minval = n->low;
2147 if (tree_int_cst_lt (maxval, n->high))
2148 maxval = n->high;
2150 /* A range counts double, since it requires two compares. */
2151 if (! tree_int_cst_equal (n->low, n->high))
2152 count++;
2154 /* If we have not seen this label yet, then increase the
2155 number of unique case node targets seen. */
2156 lab = label_rtx (n->code_label);
2157 if (bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab)))
2158 uniq++;
2161 BITMAP_FREE (label_bitmap);
2163 /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
2164 destination, such as one with a default case only.
2165 It also removes cases that are out of range for the switch
2166 type, so we should never get a zero here. */
2167 gcc_assert (count > 0);
2169 /* Compute span of values. */
2170 range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);
2172 /* Try implementing this switch statement by a short sequence of
2173 bit-wise comparisons. However, we let the binary-tree case
2174 below handle constant index expressions. */
2175 if (expand_switch_using_bit_tests_p (index_expr, range, uniq, count))
2177 /* If all the case values already fit in a word without
2178 subtracting MINVAL, the subtraction can be optimized
2179 away entirely. */
2180 if (compare_tree_int (minval, 0) > 0
2181 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
2183 minval = build_int_cst (index_type, 0);
2184 range = maxval;
2186 emit_case_bit_tests (index_type, index_expr, minval, range,
2187 case_list, default_label);
2190 /* If range of values is much bigger than number of values,
2191 make a sequence of conditional branches instead of a dispatch.
2192 If the switch-index is a constant, do it this way
2193 because we can optimize it. */
2195 else if (count < case_values_threshold ()
2196 || compare_tree_int (range,
2197 (optimize_insn_for_size_p () ? 3 : 10) * count) > 0
2198 /* RANGE may be signed, and really large ranges will show up
2199 as negative numbers. */
2200 || compare_tree_int (range, 0) < 0
2201 || !flag_jump_tables
2202 || TREE_CONSTANT (index_expr)
2203 /* If neither casesi nor tablejump is available, we can
2204 only go this way. */
2205 || (!HAVE_casesi && !HAVE_tablejump))
2207 index = expand_normal (index_expr);
2209 /* If the index is a short or char for which we do not have
2210 an insn to handle comparisons directly, convert it to
2211 a full integer now, rather than letting each comparison
2212 generate the conversion. */
2214 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
2215 && ! have_insn_for (COMPARE, GET_MODE (index)))
2217 enum machine_mode wider_mode;
2218 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
2219 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
2220 if (have_insn_for (COMPARE, wider_mode))
2222 index = convert_to_mode (wider_mode, index, unsignedp);
2223 break;
2227 do_pending_stack_adjust ();
2229 if (MEM_P (index))
2231 index = copy_to_reg (index);
2232 if (TREE_CODE (index_expr) == SSA_NAME)
2233 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (index_expr), index);
2236 /* We generate a binary decision tree to select the
2237 appropriate target code. This is done as follows:
2239 The list of cases is rearranged into a binary tree,
2240 nearly optimal assuming equal probability for each case.
2242 The tree is transformed into RTL, eliminating
2243 redundant test conditions at the same time.
2245 If program flow could reach the end of the
2246 decision tree, an unconditional jump to the
2247 default code is emitted. */
2249 balance_case_nodes (&case_list, NULL);
2250 emit_case_nodes (index, case_list, default_label, index_type);
2251 if (default_label)
2252 emit_jump (default_label);
2254 else
2256 rtx fallback_label = label_rtx (case_list->code_label);
2257 table_label = gen_label_rtx ();
2258 if (! try_casesi (index_type, index_expr, minval, range,
2259 table_label, default_label, fallback_label))
2261 bool ok;
2263 /* Index jumptables from zero for suitable values of
2264 minval to avoid a subtraction. */
2265 if (optimize_insn_for_speed_p ()
2266 && compare_tree_int (minval, 0) > 0
2267 && compare_tree_int (minval, 3) < 0)
2269 minval = build_int_cst (index_type, 0);
2270 range = maxval;
2273 ok = try_tablejump (index_type, index_expr, minval, range,
2274 table_label, default_label);
2275 gcc_assert (ok);
2278 /* Get table of labels to jump to, in order of case index. */
2280 ncases = tree_low_cst (range, 0) + 1;
2281 labelvec = XALLOCAVEC (rtx, ncases);
2282 memset (labelvec, 0, ncases * sizeof (rtx));
2284 for (n = case_list; n; n = n->right)
2286 /* Compute the low and high bounds relative to the minimum
2287 value since that should fit in a HOST_WIDE_INT while the
2288 actual values may not. */
2289 HOST_WIDE_INT i_low
2290 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2291 n->low, minval), 1);
2292 HOST_WIDE_INT i_high
2293 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2294 n->high, minval), 1);
2295 HOST_WIDE_INT i;
2297 for (i = i_low; i <= i_high; i ++)
2298 labelvec[i]
2299 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
2302 /* Fill in the gaps with the default. We may have gaps at
2303 the beginning if we tried to avoid the minval subtraction,
2304 so substitute some label even if the default label was
2305 deemed unreachable. */
2306 if (!default_label)
2307 default_label = fallback_label;
2308 for (i = 0; i < ncases; i++)
2309 if (labelvec[i] == 0)
2310 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
2312 /* Output the table. */
2313 emit_label (table_label);
2315 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
2316 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
2317 gen_rtx_LABEL_REF (Pmode, table_label),
2318 gen_rtvec_v (ncases, labelvec),
2319 const0_rtx, const0_rtx));
2320 else
2321 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
2322 gen_rtvec_v (ncases, labelvec)));
2324 /* Record no drop-through after the table. */
2325 emit_barrier ();
2328 before_case = NEXT_INSN (before_case);
2329 end = get_last_insn ();
2330 reorder_insns (before_case, end, start);
2333 free_temp_slots ();
2334 free_alloc_pool (case_node_pool);
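/* A rough worked example of the dispatch decision above (illustrative
   numbers only, assuming speed optimization and a case-values threshold
   of four or five): eight case labels spread over the values 0..200 give
   RANGE == 200, which exceeds 10 * COUNT == 80, so a balanced tree of
   compares is emitted; the same eight labels packed into 0..15 stay
   within the density limit and, provided casesi or tablejump exists and
   -fjump-tables is enabled, produce a jump table instead.  */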
2337 /* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE. */
2339 static void
2340 do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
2341 int unsignedp)
2343 do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
2344 NULL_RTX, NULL_RTX, label, -1);
2347 /* Take an ordered list of case nodes
2348 and transform them into a near optimal binary tree,
2349 on the assumption that any target code selection value is as
2350 likely as any other.
2352 The transformation is performed by splitting the ordered
2353 list into two equal sections plus a pivot. The parts are
2354 then attached to the pivot as left and right branches. Each
2355 branch is then transformed recursively. */
2357 static void
2358 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
2360 case_node_ptr np;
2362 np = *head;
2363 if (np)
2365 int i = 0;
2366 int ranges = 0;
2367 case_node_ptr *npp;
2368 case_node_ptr left;
2370 /* Count the number of entries on branch. Also count the ranges. */
2372 while (np)
2374 if (!tree_int_cst_equal (np->low, np->high))
2375 ranges++;
2377 i++;
2378 np = np->right;
2381 if (i > 2)
2383 /* Split this list if it is long enough for that to help. */
2384 npp = head;
2385 left = *npp;
2387 /* If there are just three nodes, split at the middle one. */
2388 if (i == 3)
2389 npp = &(*npp)->right;
2390 else
2392 /* Find the place in the list that bisects the list's total cost,
2393 where ranges count as 2.
2394 Here I is set to half the total cost. */
2395 i = (i + ranges + 1) / 2;
2396 while (1)
2398 /* Skip nodes while their cost does not reach that amount. */
2399 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2400 i--;
2401 i--;
2402 if (i <= 0)
2403 break;
2404 npp = &(*npp)->right;
2407 *head = np = *npp;
2408 *npp = 0;
2409 np->parent = parent;
2410 np->left = left;
2412 /* Optimize each of the two split parts. */
2413 balance_case_nodes (&np->left, np);
2414 balance_case_nodes (&np->right, np);
2416 else
2418 /* Else leave this branch as one level,
2419 but fill in `parent' fields. */
2420 np = *head;
2421 np->parent = parent;
2422 for (; np->right; np = np->right)
2423 np->right->parent = np;
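/* A small hypothetical example: the ordered chain 1 -> 2 -> ... -> 7 of
   single-valued cases is split at 4, which becomes the root; the halves
   1 -> 2 -> 3 and 5 -> 6 -> 7 hang off its LEFT and RIGHT fields and are
   balanced recursively, giving

               4
             /   \
            2     6
           / \   / \
          1   3 5   7

   Ranges count twice when the split point is chosen, so the two halves
   end up with roughly equal cost rather than an equal number of nodes.  */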
2428 /* Search the parent sections of the case node tree
2429 to see if a test for the lower bound of NODE would be redundant.
2430 INDEX_TYPE is the type of the index expression.
2432 The instructions to generate the case decision tree are
2433 output in the same order as nodes are processed so it is
2434 known that if a parent node has already tested against the current
2435 node's low value minus one, the current node is bounded at its
2436 lower end, so the test would be redundant. */
2438 static int
2439 node_has_low_bound (case_node_ptr node, tree index_type)
2441 tree low_minus_one;
2442 case_node_ptr pnode;
2444 /* If the lower bound of this node is the lowest value in the index type,
2445 we need not test it. */
2447 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
2448 return 1;
2450 /* If this node has a left branch, the value at the left must be less
2451 than that at this node, so it cannot be bounded at the bottom and
2452 we need not bother testing any further. */
2454 if (node->left)
2455 return 0;
2457 low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
2458 node->low,
2459 build_int_cst (TREE_TYPE (node->low), 1));
2461 /* If the subtraction above overflowed, we can't verify anything.
2462 Otherwise, look for a parent that tests our value - 1. */
2464 if (! tree_int_cst_lt (low_minus_one, node->low))
2465 return 0;
2467 for (pnode = node->parent; pnode; pnode = pnode->parent)
2468 if (tree_int_cst_equal (low_minus_one, pnode->high))
2469 return 1;
2471 return 0;
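/* For example (hypothetical values): if this node handles case 5 and some
   ancestor already tested against the value 4 as its high bound, the
   decision tree emitted so far has effectively ruled out values below 5
   on this path, so node_has_low_bound returns nonzero and the lower-bound
   test is skipped.  */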
2474 /* Search the parent sections of the case node tree
2475 to see if a test for the upper bound of NODE would be redundant.
2476 INDEX_TYPE is the type of the index expression.
2478 The instructions to generate the case decision tree are
2479 output in the same order as nodes are processed so it is
2480 known that if a parent node has already tested against the current
2481 node's high value plus one, the current node is bounded at its
2482 upper end, so the test would be redundant. */
2484 static int
2485 node_has_high_bound (case_node_ptr node, tree index_type)
2487 tree high_plus_one;
2488 case_node_ptr pnode;
2490 /* If there is no upper bound, obviously no test is needed. */
2492 if (TYPE_MAX_VALUE (index_type) == NULL)
2493 return 1;
2495 /* If the upper bound of this node is the highest value in the type
2496 of the index expression, we need not test against it. */
2498 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
2499 return 1;
2501 /* If this node has a right branch, the value at the right must be greater
2502 than that at this node, so it cannot be bounded at the top and
2503 we need not bother testing any further. */
2505 if (node->right)
2506 return 0;
2508 high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
2509 node->high,
2510 build_int_cst (TREE_TYPE (node->high), 1));
2512 /* If the addition above overflowed, we can't verify anything.
2513 Otherwise, look for a parent that tests our value + 1. */
2515 if (! tree_int_cst_lt (node->high, high_plus_one))
2516 return 0;
2518 for (pnode = node->parent; pnode; pnode = pnode->parent)
2519 if (tree_int_cst_equal (high_plus_one, pnode->low))
2520 return 1;
2522 return 0;
2525 /* Search the parent sections of the
2526 case node tree to see if both tests for the upper and lower
2527 bounds of NODE would be redundant. */
2529 static int
2530 node_is_bounded (case_node_ptr node, tree index_type)
2532 return (node_has_low_bound (node, index_type)
2533 && node_has_high_bound (node, index_type));
2536 /* Emit step-by-step code to select a case for the value of INDEX.
2537 The decision tree generated here follows the form of the
2538 case-node binary tree NODE, whose nodes represent test conditions.
2539 INDEX_TYPE is the type of the index of the switch.
2541 Care is taken to prune redundant tests from the decision tree
2542 by detecting any boundary conditions already checked by
2543 emitted rtx. (See node_has_high_bound, node_has_low_bound
2544 and node_is_bounded, above.)
2546 Where the test conditions can be shown to be redundant we emit
2547 an unconditional jump to the target code. As a further
2548 optimization, the subordinates of a tree node are examined to
2549 check for bounded nodes. In this case conditional and/or
2550 unconditional jumps as a result of the boundary check for the
2551 current node are arranged to target the subordinates' associated
2552 code for out-of-bound conditions on the current node.
2554 We can assume that when control reaches the code generated here,
2555 the index value has already been compared with the parents
2556 of this node, and determined to be on the same side of each parent
2557 as this node is. Thus, if this node tests for the value 51,
2558 and a parent tested for 52, we don't need to consider
2559 the possibility of a value greater than 51. If another parent
2560 tests for the value 50, then this node need not test anything. */
2562 static void
2563 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
2564 tree index_type)
2566 /* If INDEX has an unsigned type, we must make unsigned branches. */
2567 int unsignedp = TYPE_UNSIGNED (index_type);
2568 enum machine_mode mode = GET_MODE (index);
2569 enum machine_mode imode = TYPE_MODE (index_type);
2571 /* Handle indices detected as constant during RTL expansion. */
2572 if (mode == VOIDmode)
2573 mode = imode;
2575 /* See if our parents have already tested everything for us.
2576 If they have, emit an unconditional jump for this node. */
2577 if (node_is_bounded (node, index_type))
2578 emit_jump (label_rtx (node->code_label));
2580 else if (tree_int_cst_equal (node->low, node->high))
2582 /* Node is single valued. First see if the index expression matches
2583 this node and then check our children, if any. */
2585 do_jump_if_equal (mode, index,
2586 convert_modes (mode, imode,
2587 expand_normal (node->low),
2588 unsignedp),
2589 label_rtx (node->code_label), unsignedp);
2591 if (node->right != 0 && node->left != 0)
2593 /* This node has children on both sides.
2594 Dispatch to one side or the other
2595 by comparing the index value with this node's value.
2596 If one subtree is bounded, check that one first,
2597 so we can avoid real branches in the tree. */
2599 if (node_is_bounded (node->right, index_type))
2601 emit_cmp_and_jump_insns (index,
2602 convert_modes
2603 (mode, imode,
2604 expand_normal (node->high),
2605 unsignedp),
2606 GT, NULL_RTX, mode, unsignedp,
2607 label_rtx (node->right->code_label));
2608 emit_case_nodes (index, node->left, default_label, index_type);
2611 else if (node_is_bounded (node->left, index_type))
2613 emit_cmp_and_jump_insns (index,
2614 convert_modes
2615 (mode, imode,
2616 expand_normal (node->high),
2617 unsignedp),
2618 LT, NULL_RTX, mode, unsignedp,
2619 label_rtx (node->left->code_label));
2620 emit_case_nodes (index, node->right, default_label, index_type);
2623 /* If both children are single-valued cases with no
2624 children, finish up all the work. This way, we can save
2625 one ordered comparison. */
2626 else if (tree_int_cst_equal (node->right->low, node->right->high)
2627 && node->right->left == 0
2628 && node->right->right == 0
2629 && tree_int_cst_equal (node->left->low, node->left->high)
2630 && node->left->left == 0
2631 && node->left->right == 0)
2633 /* Neither node is bounded. First distinguish the two sides;
2634 then emit the code for one side at a time. */
2636 /* See if the value matches what the right hand side
2637 wants. */
2638 do_jump_if_equal (mode, index,
2639 convert_modes (mode, imode,
2640 expand_normal (node->right->low),
2641 unsignedp),
2642 label_rtx (node->right->code_label),
2643 unsignedp);
2645 /* See if the value matches what the left hand side
2646 wants. */
2647 do_jump_if_equal (mode, index,
2648 convert_modes (mode, imode,
2649 expand_normal (node->left->low),
2650 unsignedp),
2651 label_rtx (node->left->code_label),
2652 unsignedp);
2655 else
2657 /* Neither node is bounded. First distinguish the two sides;
2658 then emit the code for one side at a time. */
2660 tree test_label
2661 = build_decl (CURR_INSN_LOCATION,
2662 LABEL_DECL, NULL_TREE, NULL_TREE);
2664 /* See if the value is on the right. */
2665 emit_cmp_and_jump_insns (index,
2666 convert_modes
2667 (mode, imode,
2668 expand_normal (node->high),
2669 unsignedp),
2670 GT, NULL_RTX, mode, unsignedp,
2671 label_rtx (test_label));
2673 /* Value must be on the left.
2674 Handle the left-hand subtree. */
2675 emit_case_nodes (index, node->left, default_label, index_type);
2676 /* If left-hand subtree does nothing,
2677 go to default. */
2678 if (default_label)
2679 emit_jump (default_label);
2681 /* Code branches here for the right-hand subtree. */
2682 expand_label (test_label);
2683 emit_case_nodes (index, node->right, default_label, index_type);
2687 else if (node->right != 0 && node->left == 0)
2689 /* Here we have a right child but no left so we issue a conditional
2690 branch to default and process the right child.
2692 Omit the conditional branch to default if the right child
2693 does not have any children and is single valued; it would
2694 cost too much space to save so little time. */
2696 if (node->right->right || node->right->left
2697 || !tree_int_cst_equal (node->right->low, node->right->high))
2699 if (!node_has_low_bound (node, index_type))
2701 emit_cmp_and_jump_insns (index,
2702 convert_modes
2703 (mode, imode,
2704 expand_normal (node->high),
2705 unsignedp),
2706 LT, NULL_RTX, mode, unsignedp,
2707 default_label);
2710 emit_case_nodes (index, node->right, default_label, index_type);
2712 else
2713 /* We cannot process node->right normally
2714 since we haven't ruled out the numbers less than
2715 this node's value. So handle node->right explicitly. */
2716 do_jump_if_equal (mode, index,
2717 convert_modes
2718 (mode, imode,
2719 expand_normal (node->right->low),
2720 unsignedp),
2721 label_rtx (node->right->code_label), unsignedp);
2724 else if (node->right == 0 && node->left != 0)
2726 /* Just one subtree, on the left. */
2727 if (node->left->left || node->left->right
2728 || !tree_int_cst_equal (node->left->low, node->left->high))
2730 if (!node_has_high_bound (node, index_type))
2732 emit_cmp_and_jump_insns (index,
2733 convert_modes
2734 (mode, imode,
2735 expand_normal (node->high),
2736 unsignedp),
2737 GT, NULL_RTX, mode, unsignedp,
2738 default_label);
2741 emit_case_nodes (index, node->left, default_label, index_type);
2743 else
2744 /* We cannot process node->left normally
2745 since we haven't ruled out the numbers greater than
2746 this node's value. So handle node->left explicitly. */
2747 do_jump_if_equal (mode, index,
2748 convert_modes
2749 (mode, imode,
2750 expand_normal (node->left->low),
2751 unsignedp),
2752 label_rtx (node->left->code_label), unsignedp);
2755 else
2757 /* Node is a range. These cases are very similar to those for a single
2758 value, except that we do not start by testing whether this node
2759 is the one to branch to. */
2761 if (node->right != 0 && node->left != 0)
2763 /* Node has subtrees on both sides.
2764 If the right-hand subtree is bounded,
2765 test for it first, since we can go straight there.
2766 Otherwise, we need to make a branch in the control structure,
2767 then handle the two subtrees. */
2768 tree test_label = 0;
2770 if (node_is_bounded (node->right, index_type))
2771 /* Right hand node is fully bounded so we can eliminate any
2772 testing and branch directly to the target code. */
2773 emit_cmp_and_jump_insns (index,
2774 convert_modes
2775 (mode, imode,
2776 expand_normal (node->high),
2777 unsignedp),
2778 GT, NULL_RTX, mode, unsignedp,
2779 label_rtx (node->right->code_label));
2780 else
2782 /* Right hand node requires testing.
2783 Branch to a label where we will handle it later. */
2785 test_label = build_decl (CURR_INSN_LOCATION,
2786 LABEL_DECL, NULL_TREE, NULL_TREE);
2787 emit_cmp_and_jump_insns (index,
2788 convert_modes
2789 (mode, imode,
2790 expand_normal (node->high),
2791 unsignedp),
2792 GT, NULL_RTX, mode, unsignedp,
2793 label_rtx (test_label));
2796 /* Value belongs to this node or to the left-hand subtree. */
2798 emit_cmp_and_jump_insns (index,
2799 convert_modes
2800 (mode, imode,
2801 expand_normal (node->low),
2802 unsignedp),
2803 GE, NULL_RTX, mode, unsignedp,
2804 label_rtx (node->code_label));
2806 /* Handle the left-hand subtree. */
2807 emit_case_nodes (index, node->left, default_label, index_type);
2809 /* If right node had to be handled later, do that now. */
2811 if (test_label)
2813 /* If the left-hand subtree fell through,
2814 don't let it fall into the right-hand subtree. */
2815 if (default_label)
2816 emit_jump (default_label);
2818 expand_label (test_label);
2819 emit_case_nodes (index, node->right, default_label, index_type);
2823 else if (node->right != 0 && node->left == 0)
2825 /* Deal with values to the left of this node,
2826 if they are possible. */
2827 if (!node_has_low_bound (node, index_type))
2829 emit_cmp_and_jump_insns (index,
2830 convert_modes
2831 (mode, imode,
2832 expand_normal (node->low),
2833 unsignedp),
2834 LT, NULL_RTX, mode, unsignedp,
2835 default_label);
2838 /* Value belongs to this node or to the right-hand subtree. */
2840 emit_cmp_and_jump_insns (index,
2841 convert_modes
2842 (mode, imode,
2843 expand_normal (node->high),
2844 unsignedp),
2845 LE, NULL_RTX, mode, unsignedp,
2846 label_rtx (node->code_label));
2848 emit_case_nodes (index, node->right, default_label, index_type);
2851 else if (node->right == 0 && node->left != 0)
2853 /* Deal with values to the right of this node,
2854 if they are possible. */
2855 if (!node_has_high_bound (node, index_type))
2857 emit_cmp_and_jump_insns (index,
2858 convert_modes
2859 (mode, imode,
2860 expand_normal (node->high),
2861 unsignedp),
2862 GT, NULL_RTX, mode, unsignedp,
2863 default_label);
2866 /* Value belongs to this node or to the left-hand subtree. */
2868 emit_cmp_and_jump_insns (index,
2869 convert_modes
2870 (mode, imode,
2871 expand_normal (node->low),
2872 unsignedp),
2873 GE, NULL_RTX, mode, unsignedp,
2874 label_rtx (node->code_label));
2876 emit_case_nodes (index, node->left, default_label, index_type);
2879 else
2881 /* Node has no children so we check low and high bounds to remove
2882 redundant tests. At most one of the bounds can exist,
2883 since otherwise this node would be bounded, a case handled above. */
2884 int high_bound = node_has_high_bound (node, index_type);
2885 int low_bound = node_has_low_bound (node, index_type);
2887 if (!high_bound && low_bound)
2889 emit_cmp_and_jump_insns (index,
2890 convert_modes
2891 (mode, imode,
2892 expand_normal (node->high),
2893 unsignedp),
2894 GT, NULL_RTX, mode, unsignedp,
2895 default_label);
2898 else if (!low_bound && high_bound)
2900 emit_cmp_and_jump_insns (index,
2901 convert_modes
2902 (mode, imode,
2903 expand_normal (node->low),
2904 unsignedp),
2905 LT, NULL_RTX, mode, unsignedp,
2906 default_label);
2908 else if (!low_bound && !high_bound)
2910 /* Widen LOW and HIGH to the same width as INDEX. */
2911 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
2912 tree low = build1 (CONVERT_EXPR, type, node->low);
2913 tree high = build1 (CONVERT_EXPR, type, node->high);
2914 rtx low_rtx, new_index, new_bound;
2916 /* Instead of doing two branches, emit one unsigned branch for
2917 (index-low) > (high-low). */
2918 low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
2919 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
2920 NULL_RTX, unsignedp,
2921 OPTAB_WIDEN);
2922 new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
2923 high, low),
2924 NULL_RTX, mode, EXPAND_NORMAL);
2926 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
2927 mode, 1, default_label);
2930 emit_jump (label_rtx (node->code_label));
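/* To illustrate the single unsigned comparison used in the final branch
   above (hypothetical values): a range label "case 10 ... 20" whose
   bounds are not already covered by the parents is not checked with two
   signed compares; instead INDEX - 10 is computed and the one test
       (unsigned) (INDEX - 10) > 10
   jumps to the default label, with the jump emitted just after it
   sending all in-range values to the case's own label.  */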