The apostrophe was there to signal that the s was coming
[official-gcc.git] / gcc / stmt.c
blob5560dbc5b2dfa7a8c0b2718e72574e84128a7286
1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 The functions whose names start with `expand_' are called by the
25 expander to generate RTL instructions for various kinds of constructs. */
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
32 #include "rtl.h"
33 #include "tree.h"
34 #include "tm_p.h"
35 #include "flags.h"
36 #include "except.h"
37 #include "function.h"
38 #include "insn-config.h"
39 #include "expr.h"
40 #include "libfuncs.h"
41 #include "hard-reg-set.h"
42 #include "recog.h"
43 #include "machmode.h"
44 #include "toplev.h"
45 #include "output.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "predict.h"
49 #include "optabs.h"
50 #include "target.h"
51 #include "regs.h"
53 /* Functions and data structures for expanding case statements. */
55 /* Case label structure, used to hold info on labels within case
56 statements. We handle "range" labels; for a single-value label
57 as in C, the high and low limits are the same.
59 We start with a vector of case nodes sorted in ascending order, and
60 the default label as the last element in the vector. Before expanding
61 to RTL, we transform this vector into a list linked via the RIGHT
62 fields in the case_node struct. Nodes with higher case values are
63 later in the list.
65 Switch statements can be output in three forms. A branch table is
66 used if there are more than a few labels and the labels are dense
67 within the range between the smallest and largest case value. If a
68 branch table is used, no further manipulations are done with the case
69 node chain.
71 The alternative to the use of a branch table is to generate a series
72 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
73 and PARENT fields to hold a binary tree. Initially the tree is
74 totally unbalanced, with everything on the right. We balance the tree
75 with nodes on the left having lower case values than the parent
76 and nodes on the right having higher values. We then output the tree
77 in order.
79 For very small, suitable switch statements, we can generate a series
80 of simple bit test and branches instead. */
/* NOTE(review): blank and brace-only lines of the original file appear to
   have been dropped by the extraction that produced this text; all code
   lines below are kept verbatim.  */
82 struct case_node GTY(())
84 struct case_node *left; /* Left son in binary tree */
85 struct case_node *right; /* Right son in binary tree; also node chain */
86 struct case_node *parent; /* Parent of node in binary tree */
87 tree low; /* Lowest index value for this label */
88 tree high; /* Highest index value for this label */
89 tree code_label; /* Label to jump to when node matches */
92 typedef struct case_node case_node;
93 typedef struct case_node *case_node_ptr;
95 /* These are used by estimate_case_costs and balance_case_nodes. */
97 /* This must be a signed type, and non-ANSI compilers lack signed char. */
98 static short cost_table_[129];
99 static int use_cost_table;
100 static int cost_table_initialized;
102 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
103 is unsigned. */
104 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
/* Forward declarations for the static helpers defined later in this file.  */
106 static int n_occurrences (int, const char *);
107 static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
108 static void expand_nl_goto_receiver (void);
109 static bool check_operand_nalternatives (tree, tree);
110 static bool check_unique_operand_names (tree, tree);
111 static char *resolve_operand_name_1 (char *, tree, tree);
112 static void expand_null_return_1 (void);
113 static rtx shift_return_value (rtx);
114 static void expand_value_return (rtx);
115 static void do_jump_if_equal (rtx, rtx, rtx, int);
116 static int estimate_case_costs (case_node_ptr);
117 static bool lshift_cheap_p (void);
118 static int case_bit_test_cmp (const void *, const void *);
119 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
120 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
121 static int node_has_low_bound (case_node_ptr, tree);
122 static int node_has_high_bound (case_node_ptr, tree);
123 static int node_is_bounded (case_node_ptr, tree);
124 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
125 static struct case_node *add_case_node (struct case_node *, tree,
126 tree, tree, tree);
129 /* Return the rtx-label that corresponds to a LABEL_DECL,
130 creating it if necessary. */
/* NOTE(review): the return-type line (`rtx`) preceding the definition
   appears to have been lost in extraction.  */
133 label_rtx (tree label)
135 gcc_assert (TREE_CODE (label) == LABEL_DECL);
137 if (!DECL_RTL_SET_P (label))
139 rtx r = gen_label_rtx ();
140 SET_DECL_RTL (label, r);
141 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
/* Address-taken and nonlocal-goto labels must survive optimization.  */
142 LABEL_PRESERVE_P (r) = 1;
145 return DECL_RTL (label);
148 /* As above, but also put it on the forced-reference list of the
149 function that contains it. */
151 force_label_rtx (tree label)
153 rtx ref = label_rtx (label);
154 tree function = decl_function_context (label);
155 struct function *p;
157 gcc_assert (function);
/* Find the struct function for the function that owns LABEL, which may
   be an enclosing function rather than the current one.  */
159 if (function != current_function_decl)
160 p = find_function_data (function);
161 else
162 p = cfun;
/* Prepend REF onto that function's forced-labels list.  */
164 p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
165 p->expr->x_forced_labels);
166 return ref;
169 /* Add an unconditional jump to LABEL as the next sequential instruction. */
171 void
172 emit_jump (rtx label)
174 do_pending_stack_adjust ();
175 emit_jump_insn (gen_jump (label));
/* The barrier marks the code after the unconditional jump unreachable.  */
176 emit_barrier ();
179 /* Emit code to jump to the address
180 specified by the pointer expression EXP. */
182 void
183 expand_computed_goto (tree exp)
185 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
/* Ensure the target address has the machine's pointer mode before use.  */
187 x = convert_memory_address (Pmode, x);
189 do_pending_stack_adjust ();
190 emit_indirect_jump (x);
193 /* Handle goto statements and the labels that they can go to. */
195 /* Specify the location in the RTL code of a label LABEL,
196 which is a LABEL_DECL tree node.
198 This is used for the kind of label that the user can jump to with a
199 goto statement, and for alternatives of a switch or case statement.
200 RTL labels generated for loops and conditionals don't go through here;
201 they are generated directly at the RTL level, by other functions below.
203 Note that this has nothing to do with defining label *names*.
204 Languages vary in how they do that and what that even means. */
206 void
207 expand_label (tree label)
209 rtx label_r = label_rtx (label);
211 do_pending_stack_adjust ();
212 emit_label (label_r);
213 if (DECL_NAME (label))
214 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
/* A nonlocal label needs receiver code and must be recorded so that
   nonlocal gotos from other functions can find it.  */
216 if (DECL_NONLOCAL (label))
218 expand_nl_goto_receiver ();
219 nonlocal_goto_handler_labels
220 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
221 nonlocal_goto_handler_labels);
224 if (FORCED_LABEL (label))
225 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
227 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
228 maybe_set_first_label_num (label_r);
231 /* Generate RTL code for a `goto' statement with target label LABEL.
232 LABEL should be a LABEL_DECL tree node that was or will later be
233 defined with `expand_label'. */
235 void
236 expand_goto (tree label)
238 #ifdef ENABLE_CHECKING
239 /* Check for a nonlocal goto to a containing function. Should have
240 gotten translated to __builtin_nonlocal_goto. */
241 tree context = decl_function_context (label);
242 gcc_assert (!context || context == current_function_decl);
243 #endif
245 emit_jump (label_rtx (label));
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int count = 0;

  for (; *s != '\0'; s++)
    if (*s == c)
      count++;

  return count;
}
258 /* Generate RTL for an asm statement (explicit assembler code).
259 STRING is a STRING_CST node containing the assembler code text,
260 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
261 insn is volatile; don't optimize it. */
263 void
264 expand_asm (tree string, int vol)
266 rtx body;
/* Strip an ADDR_EXPR wrapper to reach the underlying STRING_CST.  */
268 if (TREE_CODE (string) == ADDR_EXPR)
269 string = TREE_OPERAND (string, 0);
/* ggc_strdup copies the template text into GC-managed storage so the
   insn does not point into front-end memory.  */
271 body = gen_rtx_ASM_INPUT (VOIDmode,
272 ggc_strdup (TREE_STRING_POINTER (string)));
274 MEM_VOLATILE_P (body) = vol;
276 emit_insn (body);
279 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
280 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
281 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
282 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
283 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
284 constraint allows the use of a register operand. And, *IS_INOUT
285 will be true if the operand is read-write, i.e., if it is used as
286 an input as well as an output. If *CONSTRAINT_P is not in
287 canonical form, it will be made canonical. (Note that `+' will be
288 replaced with `=' as part of this process.)
290 Returns TRUE if all went well; FALSE if an error occurred. */
292 bool
293 parse_output_constraint (const char **constraint_p, int operand_num,
294 int ninputs, int noutputs, bool *allows_mem,
295 bool *allows_reg, bool *is_inout)
297 const char *constraint = *constraint_p;
298 const char *p;
300 /* Assume the constraint doesn't allow the use of either a register
301 or memory. */
302 *allows_mem = false;
303 *allows_reg = false;
305 /* Allow the `=' or `+' to not be at the beginning of the string,
306 since it wasn't explicitly documented that way, and there is a
307 large body of code that puts it last. Swap the character to
308 the front, so as not to uglify any place else. */
309 p = strchr (constraint, '=');
310 if (!p)
311 p = strchr (constraint, '+');
313 /* If the string doesn't contain an `=', issue an error
314 message. */
315 if (!p)
317 error ("output operand constraint lacks %<=%>");
318 return false;
321 /* If the constraint begins with `+', then the operand is both read
322 from and written to. */
323 *is_inout = (*p == '+');
325 /* Canonicalize the output constraint so that it begins with `='. */
/* NOTE(review): IS_INOUT is a `bool *', so as written this condition is
   always true.  Upstream GCC tests `*is_inout' here; the dereference may
   have been lost in extraction — verify against the original source.  */
326 if (p != constraint || is_inout)
328 char *buf;
329 size_t c_len = strlen (constraint);
331 if (p != constraint)
332 warning ("output constraint %qc for operand %d "
333 "is not at the beginning",
334 *p, operand_num);
336 /* Make a copy of the constraint. */
337 buf = alloca (c_len + 1);
338 strcpy (buf, constraint);
339 /* Swap the first character and the `=' or `+'. */
340 buf[p - constraint] = buf[0];
341 /* Make sure the first character is an `='. (Until we do this,
342 it might be a `+'.) */
343 buf[0] = '=';
344 /* Replace the constraint with the canonicalized string. */
345 *constraint_p = ggc_alloc_string (buf, c_len);
346 constraint = *constraint_p;
349 /* Loop through the constraint string. */
350 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
351 switch (*p)
353 case '+':
354 case '=':
355 error ("operand constraint contains incorrectly positioned "
356 "%<+%> or %<=%>");
357 return false;
359 case '%':
360 if (operand_num + 1 == ninputs + noutputs)
362 error ("%<%%%> constraint used with last operand")
364 return false;
365 break;
367 case 'V': case 'm': case 'o':
368 *allows_mem = true;
369 break;
371 case '?': case '!': case '*': case '&': case '#':
372 case 'E': case 'F': case 'G': case 'H':
373 case 's': case 'i': case 'n':
374 case 'I': case 'J': case 'K': case 'L': case 'M':
375 case 'N': case 'O': case 'P': case ',':
376 break;
378 case '0': case '1': case '2': case '3': case '4':
379 case '5': case '6': case '7': case '8': case '9':
380 case '[':
381 error ("matching constraint not valid in output operand");
382 return false;
384 case '<': case '>':
385 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
386 excepting those that expand_call created. So match memory
387 and hope. */
388 *allows_mem = true;
389 break;
391 case 'g': case 'X':
392 *allows_reg = true;
393 *allows_mem = true;
394 break;
396 case 'p': case 'r':
397 *allows_reg = true;
398 break;
400 default:
401 if (!ISALPHA (*p))
402 break;
403 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
404 *allows_reg = true;
405 #ifdef EXTRA_CONSTRAINT_STR
406 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
407 *allows_reg = true;
408 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
409 *allows_mem = true;
410 else
412 /* Otherwise we can't assume anything about the nature of
413 the constraint except that it isn't purely registers.
414 Treat it like "g" and hope for the best. */
415 *allows_reg = true;
416 *allows_mem = true;
418 #endif
419 break;
422 return true;
425 /* Similar, but for input constraints. */
/* Parses the INPUT_NUMth input constraint; sets *ALLOWS_MEM / *ALLOWS_REG
   like parse_output_constraint.  CONSTRAINTS is the full constraint array,
   used to resolve matching (digit) constraints against their outputs.  */
427 bool
428 parse_input_constraint (const char **constraint_p, int input_num,
429 int ninputs, int noutputs, int ninout,
430 const char * const * constraints,
431 bool *allows_mem, bool *allows_reg)
433 const char *constraint = *constraint_p;
434 const char *orig_constraint = constraint;
435 size_t c_len = strlen (constraint);
436 size_t j;
437 bool saw_match = false;
439 /* Assume the constraint doesn't allow the use of either
440 a register or memory. */
441 *allows_mem = false;
442 *allows_reg = false;
444 /* Make sure constraint has neither `=', `+', nor '&'. */
446 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
447 switch (constraint[j])
449 case '+': case '=': case '&':
450 if (constraint == orig_constraint)
452 error ("input operand constraint contains %qc", constraint[j]);
453 return false;
455 break;
457 case '%':
458 if (constraint == orig_constraint
459 && input_num + 1 == ninputs - ninout)
461 error ("%<%%%> constraint used with last operand");
462 return false;
464 break;
466 case 'V': case 'm': case 'o':
467 *allows_mem = true;
468 break;
470 case '<': case '>':
471 case '?': case '!': case '*': case '#':
472 case 'E': case 'F': case 'G': case 'H':
473 case 's': case 'i': case 'n':
474 case 'I': case 'J': case 'K': case 'L': case 'M':
475 case 'N': case 'O': case 'P': case ',':
476 break;
478 /* Whether or not a numeric constraint allows a register is
479 decided by the matching constraint, and so there is no need
480 to do anything special with them. We must handle them in
481 the default case, so that we don't unnecessarily force
482 operands to memory. */
483 case '0': case '1': case '2': case '3': case '4':
484 case '5': case '6': case '7': case '8': case '9':
486 char *end;
487 unsigned long match;
489 saw_match = true;
491 match = strtoul (constraint + j, &end, 10);
492 if (match >= (unsigned long) noutputs)
494 error ("matching constraint references invalid operand number");
495 return false;
498 /* Try and find the real constraint for this dup. Only do this
499 if the matching constraint is the only alternative. */
500 if (*end == '\0'
501 && (j == 0 || (j == 1 && constraint[0] == '%')))
503 constraint = constraints[match];
504 *constraint_p = constraint;
505 c_len = strlen (constraint);
506 j = 0;
507 /* ??? At the end of the loop, we will skip the first part of
508 the matched constraint. This assumes not only that the
509 other constraint is an output constraint, but also that
510 the '=' or '+' come first. */
511 break;
513 else
514 j = end - constraint;
515 /* Anticipate increment at end of loop. */
516 j--;
518 /* Fall through. */
520 case 'p': case 'r':
521 *allows_reg = true;
522 break;
524 case 'g': case 'X':
525 *allows_reg = true;
526 *allows_mem = true;
527 break;
529 default:
530 if (! ISALPHA (constraint[j]))
532 error ("invalid punctuation %qc in constraint", constraint[j]);
533 return false;
535 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
536 != NO_REGS)
537 *allows_reg = true;
538 #ifdef EXTRA_CONSTRAINT_STR
539 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
540 *allows_reg = true;
541 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
542 *allows_mem = true;
543 else
545 /* Otherwise we can't assume anything about the nature of
546 the constraint except that it isn't purely registers.
547 Treat it like "g" and hope for the best. */
548 *allows_reg = true;
549 *allows_mem = true;
551 #endif
552 break;
555 if (saw_match && !*allows_reg)
556 warning ("matching constraint does not allow a register");
558 return true;
561 /* INPUT is one of the input operands from EXPR, an ASM_EXPR. Returns true
562 if it is an operand which must be passed in memory (i.e. an "m"
563 constraint), false otherwise. */
565 bool
566 asm_op_is_mem_input (tree input, tree expr)
568 const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (input)));
569 tree outputs = ASM_OUTPUTS (expr);
570 int noutputs = list_length (outputs);
571 const char **constraints
572 = (const char **) alloca ((noutputs) * sizeof (const char *));
573 int i = 0;
574 bool allows_mem, allows_reg;
575 tree t;
577 /* Collect output constraints. */
/* Output constraints are needed so a matching (digit) constraint on the
   input can be resolved against its output.  */
578 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
579 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
581 /* We pass 0 for input_num, ninputs and ninout; they are only used for
582 error checking which will be done at expand time. */
583 parse_input_constraint (&constraint, 0, 0, noutputs, 0, constraints,
584 &allows_mem, &allows_reg);
/* Memory-only: the constraint accepts memory but not a register.  */
585 return (!allows_reg && allows_mem);
588 /* Check for overlap between registers marked in CLOBBERED_REGS and
589 anything inappropriate in DECL. Emit error and return TRUE for error,
590 FALSE for ok. */
592 static bool
593 decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
595 /* Conflicts between asm-declared register variables and the clobber
596 list are not allowed. */
/* Only hard-register variables (register asm("...") decls) can conflict.  */
597 if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
598 && DECL_REGISTER (decl)
599 && REG_P (DECL_RTL (decl))
600 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
602 rtx reg = DECL_RTL (decl);
603 unsigned int regno;
/* A multi-word value may span several hard registers; test each one.  */
605 for (regno = REGNO (reg);
606 regno < (REGNO (reg)
607 + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
608 regno++)
609 if (TEST_HARD_REG_BIT (clobbered_regs, regno))
611 error ("asm-specifier for variable %qs conflicts with "
612 "asm clobber list",
613 IDENTIFIER_POINTER (DECL_NAME (decl)));
615 /* Reset registerness to stop multiple errors emitted for a
616 single variable. */
617 DECL_REGISTER (decl) = 0;
618 return true;
621 return false;
624 /* Generate RTL for an asm statement with arguments.
625 STRING is the instruction template.
626 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
627 Each output or input has an expression in the TREE_VALUE and
628 and a tree list in TREE_PURPOSE which in turn contains a constraint
629 name in TREE_VALUE (or NULL_TREE) and a constraint string
630 in TREE_PURPOSE.
631 CLOBBERS is a list of STRING_CST nodes each naming a hard register
632 that is clobbered by this insn.
634 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
635 Some elements of OUTPUTS may be replaced with trees representing temporary
636 values. The caller should copy those temporary values to the originally
637 specified lvalues.
639 VOL nonzero means the insn is volatile; don't optimize it. */
641 void
642 expand_asm_operands (tree string, tree outputs, tree inputs,
643 tree clobbers, int vol, location_t locus)
645 rtvec argvec, constraintvec;
646 rtx body;
647 int ninputs = list_length (inputs);
648 int noutputs = list_length (outputs);
649 int ninout;
650 int nclobbers;
651 HARD_REG_SET clobbered_regs;
652 int clobber_conflict_found = 0;
653 tree tail;
654 tree t;
655 int i;
656 /* Vector of RTX's of evaluated output operands. */
657 rtx *output_rtx = alloca (noutputs * sizeof (rtx));
658 int *inout_opnum = alloca (noutputs * sizeof (int));
659 rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
660 enum machine_mode *inout_mode
661 = alloca (noutputs * sizeof (enum machine_mode));
662 const char **constraints
663 = alloca ((noutputs + ninputs) * sizeof (const char *));
664 int old_generating_concat_p = generating_concat_p;
666 /* An ASM with no outputs needs to be treated as volatile, for now. */
667 if (noutputs == 0)
668 vol = 1;
670 if (! check_operand_nalternatives (outputs, inputs))
671 return;
673 string = resolve_asm_operand_names (string, outputs, inputs);
675 /* Collect constraints. */
/* Output constraints first, then input constraints, in one flat array.  */
676 i = 0;
677 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
678 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
679 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
680 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
682 /* Sometimes we wish to automatically clobber registers across an asm.
683 Case in point is when the i386 backend moved from cc0 to a hard reg --
684 maintaining source-level compatibility means automatically clobbering
685 the flags register. */
686 clobbers = targetm.md_asm_clobbers (clobbers);
688 /* Count the number of meaningful clobbered registers, ignoring what
689 we would ignore later. */
690 nclobbers = 0;
691 CLEAR_HARD_REG_SET (clobbered_regs);
692 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
694 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
696 i = decode_reg_name (regname);
697 if (i >= 0 || i == -4)
698 ++nclobbers;
699 else if (i == -2)
700 error ("unknown register name %qs in %<asm%>", regname);
702 /* Mark clobbered registers. */
703 if (i >= 0)
705 /* Clobbering the PIC register is an error. */
706 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
708 error ("PIC register %qs clobbered in %<asm%>", regname);
709 return;
712 SET_HARD_REG_BIT (clobbered_regs, i);
716 /* First pass over inputs and outputs checks validity and sets
717 mark_addressable if needed. */
719 ninout = 0;
720 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
722 tree val = TREE_VALUE (tail);
723 tree type = TREE_TYPE (val);
724 const char *constraint;
725 bool is_inout;
726 bool allows_reg;
727 bool allows_mem;
729 /* If there's an erroneous arg, emit no insn. */
730 if (type == error_mark_node)
731 return;
733 /* Try to parse the output constraint. If that fails, there's
734 no point in going further. */
735 constraint = constraints[i];
736 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
737 &allows_mem, &allows_reg, &is_inout))
738 return;
740 if (! allows_reg
741 && (allows_mem
742 || is_inout
743 || (DECL_P (val)
744 && REG_P (DECL_RTL (val))
745 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
746 lang_hooks.mark_addressable (val);
748 if (is_inout)
749 ninout++;
/* In-out operands appear again at the tail of the input vector.  */
752 ninputs += ninout;
753 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
755 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
756 return;
759 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
761 bool allows_reg, allows_mem;
762 const char *constraint;
764 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
765 would get VOIDmode and that could cause a crash in reload. */
766 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
767 return;
769 constraint = constraints[i + noutputs];
770 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
771 constraints, &allows_mem, &allows_reg))
772 return;
774 if (! allows_reg && allows_mem)
775 lang_hooks.mark_addressable (TREE_VALUE (tail));
778 /* Second pass evaluates arguments. */
780 ninout = 0;
781 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
783 tree val = TREE_VALUE (tail);
784 tree type = TREE_TYPE (val);
785 bool is_inout;
786 bool allows_reg;
787 bool allows_mem;
788 rtx op;
789 bool ok;
791 ok = parse_output_constraint (&constraints[i], i, ninputs,
792 noutputs, &allows_mem, &allows_reg,
793 &is_inout);
794 gcc_assert (ok);
796 /* If an output operand is not a decl or indirect ref and our constraint
797 allows a register, make a temporary to act as an intermediate.
798 Make the asm insn write into that, then our caller will copy it to
799 the real output operand. Likewise for promoted variables. */
801 generating_concat_p = 0;
803 real_output_rtx[i] = NULL_RTX;
804 if ((TREE_CODE (val) == INDIRECT_REF
805 && allows_mem)
806 || (DECL_P (val)
807 && (allows_mem || REG_P (DECL_RTL (val)))
808 && ! (REG_P (DECL_RTL (val))
809 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
810 || ! allows_reg
811 || is_inout)
813 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
814 if (MEM_P (op))
815 op = validize_mem (op);
817 if (! allows_reg && !MEM_P (op))
818 error ("output number %d not directly addressable", i);
819 if ((! allows_mem && MEM_P (op))
820 || GET_CODE (op) == CONCAT)
822 real_output_rtx[i] = op;
823 op = gen_reg_rtx (GET_MODE (op));
824 if (is_inout)
825 emit_move_insn (op, real_output_rtx[i]);
828 else
830 op = assign_temp (type, 0, 0, 1);
831 op = validize_mem (op);
832 TREE_VALUE (tail) = make_tree (type, op);
834 output_rtx[i] = op;
836 generating_concat_p = old_generating_concat_p;
838 if (is_inout)
840 inout_mode[ninout] = TYPE_MODE (type);
841 inout_opnum[ninout++] = i;
844 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
845 clobber_conflict_found = 1;
848 /* Make vectors for the expression-rtx, constraint strings,
849 and named operands. */
851 argvec = rtvec_alloc (ninputs);
852 constraintvec = rtvec_alloc (ninputs);
854 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
855 : GET_MODE (output_rtx[0])),
856 ggc_strdup (TREE_STRING_POINTER (string)),
857 empty_string, 0, argvec, constraintvec,
858 locus);
860 MEM_VOLATILE_P (body) = vol;
862 /* Eval the inputs and put them into ARGVEC.
863 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
865 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
867 bool allows_reg, allows_mem;
868 const char *constraint;
869 tree val, type;
870 rtx op;
871 bool ok;
873 constraint = constraints[i + noutputs];
874 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
875 constraints, &allows_mem, &allows_reg);
876 gcc_assert (ok);
878 generating_concat_p = 0;
880 val = TREE_VALUE (tail);
881 type = TREE_TYPE (val);
882 op = expand_expr (val, NULL_RTX, VOIDmode,
883 (allows_mem && !allows_reg
884 ? EXPAND_MEMORY : EXPAND_NORMAL));
886 /* Never pass a CONCAT to an ASM. */
887 if (GET_CODE (op) == CONCAT)
888 op = force_reg (GET_MODE (op), op);
889 else if (MEM_P (op))
890 op = validize_mem (op);
892 if (asm_operand_ok (op, constraint) <= 0)
894 if (allows_reg)
895 op = force_reg (TYPE_MODE (type), op);
896 else if (!allows_mem)
897 warning ("asm operand %d probably doesn%'t match constraints",
898 i + noutputs);
899 else if (MEM_P (op))
901 /* We won't recognize either volatile memory or memory
902 with a queued address as available a memory_operand
903 at this point. Ignore it: clearly this *is* a memory. */
905 else
907 warning ("use of memory input without lvalue in "
908 "asm operand %d is deprecated", i + noutputs);
910 if (CONSTANT_P (op))
912 rtx mem = force_const_mem (TYPE_MODE (type), op);
913 if (mem)
914 op = validize_mem (mem);
915 else
916 op = force_reg (TYPE_MODE (type), op);
918 if (REG_P (op)
919 || GET_CODE (op) == SUBREG
920 || GET_CODE (op) == CONCAT)
922 tree qual_type = build_qualified_type (type,
923 (TYPE_QUALS (type)
924 | TYPE_QUAL_CONST));
925 rtx memloc = assign_temp (qual_type, 1, 1, 1);
926 memloc = validize_mem (memloc);
927 emit_move_insn (memloc, op);
928 op = memloc;
933 generating_concat_p = old_generating_concat_p;
934 ASM_OPERANDS_INPUT (body, i) = op;
936 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
937 = gen_rtx_ASM_INPUT (TYPE_MODE (type),
938 ggc_strdup (constraints[i + noutputs]));
940 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
941 clobber_conflict_found = 1;
944 /* Protect all the operands from the queue now that they have all been
945 evaluated. */
947 generating_concat_p = 0;
949 /* For in-out operands, copy output rtx to input rtx. */
950 for (i = 0; i < ninout; i++)
952 int j = inout_opnum[i];
953 char buffer[16];
955 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
956 = output_rtx[j];
/* The matching-constraint for the hidden input is the output's number.  */
958 sprintf (buffer, "%d", j);
959 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
960 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
963 generating_concat_p = old_generating_concat_p;
965 /* Now, for each output, construct an rtx
966 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
967 ARGVEC CONSTRAINTS OPNAMES))
968 If there is more than one, put them inside a PARALLEL. */
970 if (noutputs == 1 && nclobbers == 0)
972 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
973 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
976 else if (noutputs == 0 && nclobbers == 0)
978 /* No output operands: put in a raw ASM_OPERANDS rtx. */
979 emit_insn (body);
982 else
984 rtx obody = body;
985 int num = noutputs;
987 if (num == 0)
988 num = 1;
990 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
992 /* For each output operand, store a SET. */
993 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
995 XVECEXP (body, 0, i)
996 = gen_rtx_SET (VOIDmode,
997 output_rtx[i],
998 gen_rtx_ASM_OPERANDS
999 (GET_MODE (output_rtx[i]),
1000 ggc_strdup (TREE_STRING_POINTER (string)),
1001 ggc_strdup (constraints[i]),
1002 i, argvec, constraintvec, locus));
1004 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1007 /* If there are no outputs (but there are some clobbers)
1008 store the bare ASM_OPERANDS into the PARALLEL. */
1010 if (i == 0)
1011 XVECEXP (body, 0, i++) = obody;
1013 /* Store (clobber REG) for each clobbered register specified. */
1015 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1017 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1018 int j = decode_reg_name (regname);
1019 rtx clobbered_reg;
1021 if (j < 0)
1023 if (j == -3) /* `cc', which is not a register */
1024 continue;
1026 if (j == -4) /* `memory', don't cache memory across asm */
1028 XVECEXP (body, 0, i++)
1029 = gen_rtx_CLOBBER (VOIDmode,
1030 gen_rtx_MEM
1031 (BLKmode,
1032 gen_rtx_SCRATCH (VOIDmode)));
1033 continue;
1036 /* Ignore unknown register, error already signaled. */
1037 continue;
1040 /* Use QImode since that's guaranteed to clobber just one reg. */
1041 clobbered_reg = gen_rtx_REG (QImode, j);
1043 /* Do sanity check for overlap between clobbers and respectively
1044 input and outputs that hasn't been handled. Such overlap
1045 should have been detected and reported above. */
1046 if (!clobber_conflict_found)
1048 int opno;
1050 /* We test the old body (obody) contents to avoid tripping
1051 over the under-construction body. */
1052 for (opno = 0; opno < noutputs; opno++)
1053 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1054 internal_error ("asm clobber conflict with output operand");
1056 for (opno = 0; opno < ninputs - ninout; opno++)
1057 if (reg_overlap_mentioned_p (clobbered_reg,
1058 ASM_OPERANDS_INPUT (obody, opno)))
1059 internal_error ("asm clobber conflict with input operand");
1062 XVECEXP (body, 0, i++)
1063 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1066 emit_insn (body);
1069 /* For any outputs that needed reloading into registers, spill them
1070 back to where they belong. */
1071 for (i = 0; i < noutputs; ++i)
1072 if (real_output_rtx[i])
1073 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1075 free_temp_slots ();
/* Generate RTL for an ASM_EXPR tree node EXP.  Dispatches simple
   (no-operand) asms to expand_asm and full asms to
   expand_asm_operands, then copies the resulting values back into the
   caller's output lvalues.  */

void
expand_asm_expr (tree exp)
{
  int noutputs, i;
  tree outputs, tail;
  tree *o;

  if (ASM_INPUT_P (exp))
    {
      /* Old-style "asm" with no operands: just emit the template.  */
      expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
      return;
    }

  outputs = ASM_OUTPUTS (exp);
  noutputs = list_length (outputs);
  /* o[I] is the place that output number I should be written.  */
  o = (tree *) alloca (noutputs * sizeof (tree));

  /* Record the contents of OUTPUTS before it is modified.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    o[i] = TREE_VALUE (tail);

  /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
     OUTPUTS some trees for where the values were actually stored.  */
  expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
		       ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
		       input_location);

  /* Copy all the intermediate outputs into the specified outputs.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      if (o[i] != TREE_VALUE (tail))
	{
	  expand_assignment (o[i], TREE_VALUE (tail), 0);
	  free_temp_slots ();

	  /* Restore the original value so that it's correct the next
	     time we expand this function.  */
	  TREE_VALUE (tail) = o[i];
	}
    }
}
/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.

   OUTPUTS and INPUTS are the TREE_LIST chains of asm operands; each
   element's TREE_PURPOSE carries the constraint string.  The number of
   alternatives is one more than the number of commas in the
   constraint.  Emits an error and returns false on mismatch.  */

static bool
check_operand_nalternatives (tree outputs, tree inputs)
{
  if (outputs || inputs)
    {
      /* Use the first operand's constraint as the reference count.  */
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives
	= n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in %<asm%>");
	  return false;
	}

      /* Walk all outputs, then (via NEXT) all inputs, in one loop.  */
      tmp = outputs;
      while (tmp)
	{
	  const char *constraint
	    = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));

	  if (n_occurrences (',', constraint) != nalternatives)
	    {
	      error ("operand constraints for %<asm%> differ "
		     "in number of alternatives");
	      return false;
	    }

	  if (TREE_CHAIN (tmp))
	    tmp = TREE_CHAIN (tmp);
	  else
	    tmp = next, next = 0;
	}
    }

  return true;
}
1163 /* A subroutine of expand_asm_operands. Check that all operand names
1164 are unique. Return true if so. We rely on the fact that these names
1165 are identifiers, and so have been canonicalized by get_identifier,
1166 so all we need are pointer comparisons. */
1168 static bool
1169 check_unique_operand_names (tree outputs, tree inputs)
1171 tree i, j;
1173 for (i = outputs; i ; i = TREE_CHAIN (i))
1175 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1176 if (! i_name)
1177 continue;
1179 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1180 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1181 goto failure;
1184 for (i = inputs; i ; i = TREE_CHAIN (i))
1186 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1187 if (! i_name)
1188 continue;
1190 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1191 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1192 goto failure;
1193 for (j = outputs; j ; j = TREE_CHAIN (j))
1194 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1195 goto failure;
1198 return true;
1200 failure:
1201 error ("duplicate asm operand name %qs",
1202 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1203 return false;
/* A subroutine of expand_asm_operands.  Resolve the names of the operands
   in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
   STRING and in the constraints to those numbers.

   Returns the (possibly rebuilt) template STRING.  Input constraint
   strings and the template are rewritten in heap copies because the
   substitution shrinks them in place.  */

tree
resolve_asm_operand_names (tree string, tree outputs, tree inputs)
{
  char *buffer;
  char *p;
  const char *c;
  tree t;

  check_unique_operand_names (outputs, inputs);

  /* Substitute [<name>] in input constraint strings.  There should be no
     named operands in output constraints.  */
  for (t = inputs; t ; t = TREE_CHAIN (t))
    {
      c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
      if (strchr (c, '[') != NULL)
	{
	  /* Work on a writable copy; resolve_operand_name_1 edits it
	     in place.  */
	  p = buffer = xstrdup (c);
	  while ((p = strchr (p, '[')) != NULL)
	    p = resolve_operand_name_1 (p, outputs, inputs);
	  TREE_VALUE (TREE_PURPOSE (t))
	    = build_string (strlen (buffer), buffer);
	  free (buffer);
	}
    }

  /* Now check for any needed substitutions in the template.  A "%["
     or "%X[" (X alphabetic) sequence introduces a named operand.  */
  c = TREE_STRING_POINTER (string);
  while ((c = strchr (c, '%')) != NULL)
    {
      if (c[1] == '[')
	break;
      else if (ISALPHA (c[1]) && c[2] == '[')
	break;
      else
	{
	  c += 1;
	  continue;
	}
    }

  if (c)
    {
      /* OK, we need to make a copy so we can perform the substitutions.
	 Assume that we will not need extra space--we get to remove '['
	 and ']', which means we cannot have a problem until we have more
	 than 999 operands.  */
      buffer = xstrdup (TREE_STRING_POINTER (string));
      p = buffer + (c - TREE_STRING_POINTER (string));

      while ((p = strchr (p, '%')) != NULL)
	{
	  if (p[1] == '[')
	    p += 1;
	  else if (ISALPHA (p[1]) && p[2] == '[')
	    p += 2;
	  else
	    {
	      p += 1;
	      continue;
	    }

	  p = resolve_operand_name_1 (p, outputs, inputs);
	}

      string = build_string (strlen (buffer), buffer);
      free (buffer);
    }

  return string;
}
/* A subroutine of resolve_operand_names.  P points to the '[' for a
   potential named operand of the form [<name>].  In place, replace
   the name and brackets with a number.  Return a pointer to the
   balance of the string after substitution.

   Operand numbers count outputs first, then inputs, matching the asm
   operand numbering convention.  The buffer shrinks in place; the
   '[name]' text is always at least as long as the decimal number.  */

static char *
resolve_operand_name_1 (char *p, tree outputs, tree inputs)
{
  char *q;
  int op;
  tree t;
  size_t len;

  /* Collect the operand name.  */
  q = strchr (p, ']');
  if (!q)
    {
      error ("missing close brace for named operand");
      return strchr (p, '\0');
    }
  len = q - p - 1;

  /* Resolve the name to a number.  */
  for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
	{
	  const char *c = TREE_STRING_POINTER (name);
	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
	    goto found;
	}
    }
  for (t = inputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
	{
	  const char *c = TREE_STRING_POINTER (name);
	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
	    goto found;
	}
    }

  /* No match: report the name (NUL-terminate it first) and fall back
     to operand 0 so expansion can continue after the error.  */
  *q = '\0';
  error ("undefined named operand %qs", p + 1);
  op = 0;
 found:

  /* Replace the name with the number.  Unfortunately, not all libraries
     get the return value of sprintf correct, so search for the end of the
     generated string by hand.  */
  sprintf (p, "%d", op);
  p = strchr (p, '\0');

  /* Verify the no extra buffer space assumption.  */
  gcc_assert (p <= q);

  /* Shift the rest of the buffer down to fill the gap.  */
  memmove (p, q + 1, strlen (q + 1) + 1);

  return p;
}
/* Generate RTL to evaluate the expression EXP, used as a statement
   (i.e. its value is discarded).  */

void
expand_expr_stmt (tree exp)
{
  rtx value;
  tree type;

  value = expand_expr (exp, const0_rtx, VOIDmode, 0);
  type = TREE_TYPE (exp);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (type) == VOIDmode)
	;
      else if (TYPE_MODE (type) != BLKmode)
	value = copy_to_reg (value);
      else
	{
	  /* BLKmode: can't copy to a register, so force a read by
	     comparing the value with itself.  */
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_and_jump_insns (value, value, EQ,
				   expand_expr (TYPE_SIZE (type),
						NULL_RTX, VOIDmode, 0),
				   BLKmode, 0, lab);
	  emit_label (lab);
	}
    }

  /* Free any temporaries used to evaluate this expression.  */
  free_temp_slots ();
}
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  LOCUS is the
   (potential) location of the expression.

   Iterates (via the RESTART label) rather than recursing for the
   common "peel one level and re-examine" cases.  */

int
warn_if_unused_value (tree exp, location_t locus)
{
 restart:
  if (TREE_USED (exp))
    return 0;

  /* Don't warn about void constructs.  This includes casting to void,
     void function calls, and statement expressions with a final cast
     to void.  */
  if (VOID_TYPE_P (TREE_TYPE (exp)))
    return 0;

  if (EXPR_HAS_LOCATION (exp))
    locus = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* These all have side effects by nature; never warn.  */
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      exp = BIND_EXPR_BODY (exp);
      goto restart;

    case SAVE_EXPR:
      exp = TREE_OPERAND (exp, 0);
      goto restart;

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COMPOUND_EXPR:
      if (TREE_NO_WARNING (exp))
	return 0;
      if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  There can be an arbitrary number of
	 casts before the modify, so we must loop until we find the first
	 non-cast expression and then test to see if that is a modify.  */
      {
	tree tem = TREE_OPERAND (exp, 0);

	while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
	  tem = TREE_OPERAND (tem, 0);

	if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
	    || TREE_CODE (tem) == CALL_EXPR)
	  return 0;
      }
      goto maybe_warn;

    case INDIRECT_REF:
      /* Don't warn about automatic dereferencing of references, since
	 the user cannot control it.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
	{
	  exp = TREE_OPERAND (exp, 0);
	  goto restart;
	}
      /* Fall through.  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((DECL_P (exp) || REFERENCE_CLASS_P (exp))
	  && TREE_THIS_VOLATILE (exp))
	return 0;

      /* If this is an expression which has no operands, there is no value
	 to be unused.  There are no such language-independent codes,
	 but front ends may define such.  */
      if (EXPRESSION_CLASS_P (exp) && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
	return 0;

    maybe_warn:
      /* If this is an expression with side effects, don't warn.  */
      if (TREE_SIDE_EFFECTS (exp))
	return 0;

      warning ("%Hvalue computed is not used", &locus);
      return 1;
    }
}
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}
/* Generate RTL to return directly from the current function.
   (That is, we bypass any return value.)  Jumps to the function's
   shared naked-return label, creating it on first use.  */

void
expand_naked_return (void)
{
  rtx end_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  end_label = naked_return_label;
  if (end_label == 0)
    end_label = naked_return_label = gen_label_rtx ();

  emit_jump (end_label);
}
/* If the current function returns values in the most significant part
   of a register, shift return value VAL appropriately.  The mode of
   the function's return type is known not to be BLKmode.
   Returns VAL (possibly replaced by the shifted value).  */

static rtx
shift_return_value (rtx val)
{
  tree type;

  type = TREE_TYPE (DECL_RESULT (current_function_decl));
  if (targetm.calls.return_in_msb (type))
    {
      rtx target;
      HOST_WIDE_INT shift;

      target = DECL_RTL (DECL_RESULT (current_function_decl));
      /* Left-shift by the unused low-order bits of the return register.  */
      shift = (GET_MODE_BITSIZE (GET_MODE (target))
	       - BITS_PER_UNIT * int_size_in_bytes (type));
      if (shift > 0)
	val = expand_shift (LSHIFT_EXPR, GET_MODE (target),
			    gen_lowpart (GET_MODE (target), val),
			    build_int_cst (NULL_TREE, shift), target, 1);
    }
  return val;
}
/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (rtx val)
{
  /* Copy the value to the return location
     unless it's already there.  */

  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
  if (return_reg != val)
    {
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
	{
	  /* The target promotes small return values; widen VAL to the
	     promoted mode before storing it.  */
	  int unsignedp = TYPE_UNSIGNED (type);
	  enum machine_mode old_mode
	    = DECL_MODE (DECL_RESULT (current_function_decl));
	  enum machine_mode mode
	    = promote_mode (type, old_mode, &unsignedp, 1);

	  if (mode != old_mode)
	    val = convert_modes (mode, old_mode, val, unsignedp);
	}
      if (GET_CODE (return_reg) == PARALLEL)
	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
	emit_move_insn (return_reg, val);
    }

  expand_null_return_1 ();
}
/* Output a return with no value.  Jumps to the function's shared
   return label, creating it on first use.  */

static void
expand_null_return_1 (void)
{
  rtx end_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  end_label = return_label;
  if (end_label == 0)
    end_label = return_label = gen_label_rtx ();
  emit_jump (end_label);
}
/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

void
expand_return (tree retval)
{
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      /* Still evaluate RETVAL for its side effects.  */
      expand_expr (retval, NULL_RTX, VOIDmode, 0);
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
	 returns a value.  */
      expand_null_return ();
      return;
    }
  else if ((TREE_CODE (retval) == MODIFY_EXPR
	    || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = TREE_OPERAND (retval, 1);
  else
    retval_rhs = retval;

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* If we are returning the RESULT_DECL, then the value has already
     been stored into it, so we don't have to do anything special.  */
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
    expand_value_return (result_rtl);

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  The compiler currently can't handle
     copying a BLKmode value into registers.  We could put this code in a
     more general area (for use by everyone instead of just function
     call/return), but until this feature is generally usable it is kept here
     (and in expand_call).  */

  else if (retval_rhs != 0
	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
	   && REG_P (result_rtl))
    {
      int i;
      unsigned HOST_WIDE_INT bitpos, xbitpos;
      unsigned HOST_WIDE_INT padding_correction = 0;
      unsigned HOST_WIDE_INT bytes
	= int_size_in_bytes (TREE_TYPE (retval_rhs));
      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      unsigned int bitsize
	= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
      rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
      rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
      enum machine_mode tmpmode, result_reg_mode;

      if (bytes == 0)
	{
	  expand_null_return ();
	  return;
	}

      /* If the structure doesn't take up a whole number of words, see
	 whether the register value should be padded on the left or on
	 the right.  Set PADDING_CORRECTION to the number of padding
	 bits needed on the left side.

	 In most ABIs, the structure will be returned at the least end of
	 the register, which translates to right padding on little-endian
	 targets and left padding on big-endian targets.  The opposite
	 holds if the structure is returned at the most significant
	 end of the register.  */
      if (bytes % UNITS_PER_WORD != 0
	  && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
	      ? !BYTES_BIG_ENDIAN
	      : BYTES_BIG_ENDIAN))
	padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
					       * BITS_PER_UNIT));

      /* Copy the structure BITSIZE bits at a time.  */
      for (bitpos = 0, xbitpos = padding_correction;
	   bitpos < bytes * BITS_PER_UNIT;
	   bitpos += bitsize, xbitpos += bitsize)
	{
	  /* We need a new destination pseudo each time xbitpos is
	     on a word boundary and when xbitpos == padding_correction
	     (the first time through).  */
	  if (xbitpos % BITS_PER_WORD == 0
	      || xbitpos == padding_correction)
	    {
	      /* Generate an appropriate register.  */
	      dst = gen_reg_rtx (word_mode);
	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;

	      /* Clear the destination before we move anything into it.  */
	      emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
	    }

	  /* We need a new source operand each time bitpos is on a word
	     boundary.  */
	  if (bitpos % BITS_PER_WORD == 0)
	    src = operand_subword_force (result_val,
					 bitpos / BITS_PER_WORD,
					 BLKmode);

	  /* Use bitpos for the source extraction (left justified) and
	     xbitpos for the destination store (right justified).  */
	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
			   extract_bit_field (src, bitsize,
					      bitpos % BITS_PER_WORD, 1,
					      NULL_RTX, word_mode, word_mode));
	}

      tmpmode = GET_MODE (result_rtl);
      if (tmpmode == BLKmode)
	{
	  /* Find the smallest integer mode large enough to hold the
	     entire structure and use that mode instead of BLKmode
	     on the USE insn for the return register.  */
	  for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmpmode != VOIDmode;
	       tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	    /* Have we found a large enough mode?  */
	    if (GET_MODE_SIZE (tmpmode) >= bytes)
	      break;

	  /* A suitable mode should have been found.  */
	  gcc_assert (tmpmode != VOIDmode);

	  PUT_MODE (result_rtl, tmpmode);
	}

      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
	result_reg_mode = word_mode;
      else
	result_reg_mode = tmpmode;
      result_reg = gen_reg_rtx (result_reg_mode);

      for (i = 0; i < n_regs; i++)
	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
			result_pseudos[i]);

      if (tmpmode != result_reg_mode)
	result_reg = gen_lowpart (tmpmode, result_reg);

      expand_value_return (result_reg);
    }
  else if (retval_rhs != 0
	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
	   && (REG_P (result_rtl)
	       || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Calculate the return value into a temporary (usually a pseudo
	 reg).  */
      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);

      val = assign_temp (nt, 0, 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
      val = force_not_mem (val);
      /* Return the calculated value.  */
      expand_value_return (shift_return_value (val));
    }
  else
    {
      /* No hard reg used; calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, 0);
      expand_value_return (result_rtl);
    }
}
/* Given a pointer to a BLOCK node return nonzero if (and only if) the node
   in question represents the outermost pair of curly braces (i.e. the "body
   block") of a function or method.

   For any BLOCK node representing a "body block" of a function or method, the
   BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
   represents the outermost (function) scope for the function or method (i.e.
   the one which includes the formal parameters).  The BLOCK_SUPERCONTEXT of
   *that* node in turn will point to the relevant FUNCTION_DECL node.  */

int
is_body_block (tree stmt)
{
  /* Some front ends don't emit body blocks at all.  */
  if (lang_hooks.no_body_blocks)
    return 0;

  if (TREE_CODE (stmt) == BLOCK)
    {
      tree parent = BLOCK_SUPERCONTEXT (stmt);

      if (parent && TREE_CODE (parent) == BLOCK)
	{
	  tree grandparent = BLOCK_SUPERCONTEXT (parent);

	  if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
	    return 1;
	}
    }

  return 0;
}
/* Emit code to restore vital registers at the beginning of a nonlocal goto
   handler.  */
static void
expand_nl_goto_receiver (void)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments are to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be
       decrementing fp by STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
      size_t i;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (get_arg_pointer_save_area (cfun)));
	}
    }
#endif

#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
#endif

  /* @@@ This is a kludge.  Not all machine descriptions define a blockage
     insn, but we must not allow the code we just generated to be reordered
     by scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  So emit an ASM_INPUT to act as blockage
     insn.  */
  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
}
/* Generate RTL for the automatic variable declaration DECL.
   (Other kinds of declarations are simply ignored if seen here.)  */

void
expand_decl (tree decl)
{
  tree type;

  type = TREE_TYPE (decl);

  /* For a CONST_DECL, set mode, alignment, and sizes from those of the
     type in case this node is used in a reference.  */
  if (TREE_CODE (decl) == CONST_DECL)
    {
      DECL_MODE (decl) = TYPE_MODE (type);
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
      return;
    }

  /* Otherwise, only automatic variables need any expansion done.  Static and
     external variables, and external functions, will be handled by
     `assemble_variable' (called from finish_decl).  TYPE_DECL requires
     nothing.  PARM_DECLs are handled in `assign_parms'.  */
  if (TREE_CODE (decl) != VAR_DECL)
    return;

  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
    return;

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));

  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */
    {
      rtx x;
      if (DECL_INITIAL (decl) == 0)
	/* Error message was already done; now avoid a crash.  */
	x = gen_rtx_MEM (BLKmode, const0_rtx);
      else
	/* An initializer is going to decide the size of this array.
	   Until we know the size, represent its address with a reg.  */
	x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));

      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);
    }
  else if (use_register_for_decl (decl))
    {
      /* Automatic variable that can go in a register.  */
      int unsignedp = TYPE_UNSIGNED (type);
      enum machine_mode reg_mode
	= promote_mode (type, DECL_MODE (decl), &unsignedp, 0);

      SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));

      /* Note if the object is a user variable.  */
      if (!DECL_ARTIFICIAL (decl))
	{
	  mark_user_reg (DECL_RTL (decl));

	  /* Trust user variables which have a pointer type to really
	     be pointers.  Do not trust compiler generated temporaries
	     as our type system is totally busted as it relates to
	     pointer arithmetic which translates into lots of compiler
	     generated objects with pointer types, but which are not really
	     pointers.  */
	  if (POINTER_TYPE_P (type))
	    mark_reg_pointer (DECL_RTL (decl),
			      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
	}
    }

  else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
	   && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
		 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
					  STACK_CHECK_MAX_VAR_SIZE)))
    {
      /* Variable of fixed size that goes on the stack.  */
      rtx oldaddr = 0;
      rtx addr;
      rtx x;

      /* If we previously made RTL for this decl, it must be an array
	 whose size was determined by the initializer.
	 The old address was a register; set that register now
	 to the proper address.  */
      if (DECL_RTL_SET_P (decl))
	{
	  gcc_assert (MEM_P (DECL_RTL (decl)));
	  gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0)));
	  oldaddr = XEXP (DECL_RTL (decl), 0);
	}

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
			   : GET_MODE_BITSIZE (DECL_MODE (decl)));
      DECL_USER_ALIGN (decl) = 0;

      x = assign_temp (decl, 1, 1, 1);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);

      if (oldaddr)
	{
	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
	  if (addr != oldaddr)
	    emit_move_insn (oldaddr, addr);
	}
    }
  else
    /* Dynamic-size object: must push space on the stack.  */
    {
      rtx address, size, x;

      /* Record the stack pointer on entry to block, if have
	 not already done so.  */
      do_pending_stack_adjust ();

      /* Compute the variable's size, in bytes.  This will expand any
	 needed SAVE_EXPRs for the first time.  */
      size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
      free_temp_slots ();

      /* Allocate space on the stack for the variable.  Note that
	 DECL_ALIGN says how the variable is to be aligned and we
	 cannot use it to conclude anything about the alignment of
	 the size.  */
      address = allocate_dynamic_stack_space (size, NULL_RTX,
					      TYPE_ALIGN (TREE_TYPE (decl)));

      /* Reference the variable indirect through that rtx.  */
      x = gen_rtx_MEM (DECL_MODE (decl), address);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);


      /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
      DECL_ALIGN (decl) = STACK_BOUNDARY;
#else
      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
#endif
      DECL_USER_ALIGN (decl) = 0;
    }
}
/* Emit code to save the current value of stack.
   Returns the rtx holding the saved stack pointer.  */
rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  do_pending_stack_adjust ();
  /* emit_stack_save fills RET with the save location.  */
  emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
  return ret;
}
2047 /* Emit code to restore the current value of stack. */
2048 void
2049 expand_stack_restore (tree var)
2051 rtx sa = DECL_RTL (var);
2053 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
   DECL_ELTS is the list of elements that belong to DECL's type.
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */

void
expand_anon_union_decl (tree decl, tree cleanup ATTRIBUTE_UNUSED,
			tree decl_elts)
{
  rtx x;
  tree t;

  /* If any of the elements are addressable, so is the entire union.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_VALUE (t)))
      {
	TREE_ADDRESSABLE (decl) = 1;
	break;
      }

  expand_decl (decl);
  x = DECL_RTL (decl);

  /* Go through the elements, assigning RTL to each.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    {
      tree decl_elt = TREE_VALUE (t);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
      rtx decl_rtl;

      /* If any of the elements are addressable, so is the entire
	 union.  */
      if (TREE_USED (decl_elt))
	TREE_USED (decl) = 1;

      /* Propagate the union's alignment to the elements.  */
      DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
      DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);

      /* If the element has BLKmode and the union doesn't, the union is
	 aligned such that the element doesn't need to have BLKmode, so
	 change the element's mode to the appropriate one for its size.  */
      if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
	DECL_MODE (decl_elt) = mode
	  = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);

      if (mode == GET_MODE (x))
	decl_rtl = x;
      else if (MEM_P (x))
	/* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
	   instead create a new MEM rtx with the proper mode.  */
	decl_rtl = adjust_address_nv (x, mode, 0);
      else
	{
	  gcc_assert (REG_P (x));
	  decl_rtl = gen_lowpart_SUBREG (mode, x);
	}
      SET_DECL_RTL (decl_elt, decl_rtl);
    }
}
/* Do the insertion of a case label into case_list.  The labels are
   fed to us in descending order from the sorted vector of case labels used
   in the tree part of the middle end.  So the list we construct is
   sorted in ascending order.  The bounds on the case range, LOW and HIGH,
   are converted to case's index type TYPE.

   Returns the new list head (the existing HEAD when the case is
   unreachable and therefore dropped).  */

static struct case_node *
add_case_node (struct case_node *head, tree type, tree low, tree high,
	       tree label)
{
  tree min_value, max_value;
  struct case_node *r;

  gcc_assert (TREE_CODE (low) == INTEGER_CST);
  gcc_assert (!high || TREE_CODE (high) == INTEGER_CST);

  min_value = TYPE_MIN_VALUE (type);
  max_value = TYPE_MAX_VALUE (type);

  /* If there's no HIGH value, then this is not a case range; it's
     just a simple case label.  But that's just a degenerate case
     range.
     If the bounds are equal, turn this into the one-value case.  */
  if (!high || tree_int_cst_equal (low, high))
    {
      /* If the simple case value is unreachable, ignore it.  */
      if (tree_int_cst_compare (low, min_value) < 0
	  || tree_int_cst_compare (low, max_value) > 0)
	return head;
      low = fold_convert (type, low);
      high = low;
    }
  else
    {
      /* If the entire case range is unreachable, ignore it.  */
      if (tree_int_cst_compare (high, min_value) < 0
	  || tree_int_cst_compare (low, max_value) > 0)
	return head;

      /* If the lower bound is less than the index type's minimum
	 value, truncate the range bounds.  */
      if (tree_int_cst_compare (low, min_value) < 0)
	low = min_value;
      low = fold_convert (type, low);

      /* If the upper bound is greater than the index type's maximum
	 value, truncate the range bounds.  */
      if (tree_int_cst_compare (high, max_value) > 0)
	high = max_value;
      high = fold_convert (type, high);
    }


  /* Add this label to the chain.  */
  r = ggc_alloc (sizeof (struct case_node));
  r->low = low;
  r->high = high;
  r->code_label = label;
  r->parent = r->left = NULL;
  r->right = head;
  return r;
}
2179 /* Maximum number of case bit tests. */
2180 #define MAX_CASE_BIT_TESTS 3
2182 /* By default, enable case bit tests on targets with ashlsi3. */
2183 #ifndef CASE_USE_BIT_TESTS
2184 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
2185 != CODE_FOR_nothing)
2186 #endif
2189 /* A case_bit_test represents a set of case nodes that may be
2190 selected from using a bit-wise comparison. HI and LO hold
2191 the integer to be tested against, LABEL contains the label
2192 to jump to upon success and BITS counts the number of case
2193 nodes handled by this test, typically the number of bits
2194 set in HI:LO. */
2196 struct case_bit_test
2198 HOST_WIDE_INT hi;
2199 HOST_WIDE_INT lo;
2200 rtx label;
2201 int bits;
2204 /* Determine whether "1 << x" is relatively cheap in word_mode. */
2206 static
2207 bool lshift_cheap_p (void)
2209 static bool init = false;
2210 static bool cheap = true;
2212 if (!init)
2214 rtx reg = gen_rtx_REG (word_mode, 10000);
2215 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
2216 cheap = cost < COSTS_N_INSNS (3);
2217 init = true;
2220 return cheap;
2223 /* Comparison function for qsort to order bit tests by decreasing
2224 number of case nodes, i.e. the node with the most cases gets
2225 tested first. */
2227 static int
2228 case_bit_test_cmp (const void *p1, const void *p2)
2230 const struct case_bit_test *d1 = p1;
2231 const struct case_bit_test *d2 = p2;
2233 return d2->bits - d1->bits;
2236 /* Expand a switch statement by a short sequence of bit-wise
2237 comparisons. "switch(x)" is effectively converted into
2238 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
2239 integer constants.
2241 INDEX_EXPR is the value being switched on, which is of
2242 type INDEX_TYPE. MINVAL is the lowest case value of in
2243 the case nodes, of INDEX_TYPE type, and RANGE is highest
2244 value minus MINVAL, also of type INDEX_TYPE. NODES is
2245 the set of case nodes, and DEFAULT_LABEL is the label to
2246 branch to should none of the cases match.
2248 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
2249 node targets. */
2251 static void
2252 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
2253 tree range, case_node_ptr nodes, rtx default_label)
2255 struct case_bit_test test[MAX_CASE_BIT_TESTS];
2256 enum machine_mode mode;
2257 rtx expr, index, label;
2258 unsigned int i,j,lo,hi;
2259 struct case_node *n;
2260 unsigned int count;
2262 count = 0;
2263 for (n = nodes; n; n = n->right)
2265 label = label_rtx (n->code_label);
2266 for (i = 0; i < count; i++)
2267 if (label == test[i].label)
2268 break;
2270 if (i == count)
2272 gcc_assert (count < MAX_CASE_BIT_TESTS);
2273 test[i].hi = 0;
2274 test[i].lo = 0;
2275 test[i].label = label;
2276 test[i].bits = 1;
2277 count++;
2279 else
2280 test[i].bits++;
2282 lo = tree_low_cst (fold (build2 (MINUS_EXPR, index_type,
2283 n->low, minval)), 1);
2284 hi = tree_low_cst (fold (build2 (MINUS_EXPR, index_type,
2285 n->high, minval)), 1);
2286 for (j = lo; j <= hi; j++)
2287 if (j >= HOST_BITS_PER_WIDE_INT)
2288 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
2289 else
2290 test[i].lo |= (HOST_WIDE_INT) 1 << j;
2293 qsort (test, count, sizeof(*test), case_bit_test_cmp);
2295 index_expr = fold (build2 (MINUS_EXPR, index_type,
2296 convert (index_type, index_expr),
2297 convert (index_type, minval)));
2298 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
2299 do_pending_stack_adjust ();
2301 mode = TYPE_MODE (index_type);
2302 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
2303 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
2304 default_label);
2306 index = convert_to_mode (word_mode, index, 0);
2307 index = expand_binop (word_mode, ashl_optab, const1_rtx,
2308 index, NULL_RTX, 1, OPTAB_WIDEN);
2310 for (i = 0; i < count; i++)
2312 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
2313 expr = expand_binop (word_mode, and_optab, index, expr,
2314 NULL_RTX, 1, OPTAB_WIDEN);
2315 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
2316 word_mode, 1, test[i].label);
2319 emit_jump (default_label);
2322 #ifndef HAVE_casesi
2323 #define HAVE_casesi 0
2324 #endif
2326 #ifndef HAVE_tablejump
2327 #define HAVE_tablejump 0
2328 #endif
2330 /* Terminate a case (Pascal) or switch (C) statement
2331 in which ORIG_INDEX is the expression to be tested.
2332 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
2333 type as given in the source before any compiler conversions.
2334 Generate the code to test it and jump to the right place. */
2336 void
2337 expand_case (tree exp)
/* NOTE(review): this dump has lost the original blank and brace-only
   lines; statement text below is unmodified.  Three expansion
   strategies are chosen from COUNT, UNIQ and RANGE: a short sequence
   of bit tests (emit_case_bit_tests), a binary decision tree
   (balance_case_nodes + emit_case_nodes), or a casesi/tablejump
   dispatch table.  */
2339 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
2340 rtx default_label = 0;
2341 struct case_node *n, *m;
2342 unsigned int count, uniq;
2343 rtx index;
2344 rtx table_label;
2345 int ncases;
2346 rtx *labelvec;
2347 int i, fail;
2348 rtx before_case, end, lab;
2350 tree vec = SWITCH_LABELS (exp);
2351 tree orig_type = TREE_TYPE (exp);
2352 tree index_expr = SWITCH_COND (exp);
2353 tree index_type = TREE_TYPE (index_expr);
2354 int unsignedp = TYPE_UNSIGNED (index_type);
2356 /* The insn after which the case dispatch should finally
2357 be emitted. Zero for a dummy. */
2358 rtx start;
2360 /* A list of case labels; it is first built as a list and it may then
2361 be rearranged into a nearly balanced binary tree. */
2362 struct case_node *case_list = 0;
2364 /* Label to jump to if no case matches. */
2365 tree default_label_decl = 0;
2367 /* The switch body is lowered in gimplify.c, we should never have
2368 switches with a non-NULL SWITCH_BODY here. */
2369 gcc_assert (!SWITCH_BODY (exp));
2370 gcc_assert (SWITCH_LABELS (exp));
2372 do_pending_stack_adjust ();
2374 /* An ERROR_MARK occurs for various reasons including invalid data type. */
2375 if (index_type != error_mark_node)
2377 for (i = TREE_VEC_LENGTH (vec); --i >= 0; )
2379 tree elt = TREE_VEC_ELT (vec, i);
2381 /* Handle default labels specially. */
2382 if (!CASE_HIGH (elt) && !CASE_LOW (elt))
2384 gcc_assert (!default_label_decl);
2385 default_label_decl = CASE_LABEL (elt);
2387 else
2388 case_list = add_case_node (case_list, index_type,
2389 CASE_LOW (elt), CASE_HIGH (elt),
2390 CASE_LABEL (elt));
2394 /* Make sure start points to something that won't need any
2395 transformation before the end of this function. */
2396 start = get_last_insn ();
2397 if (! NOTE_P (start))
2399 emit_note (NOTE_INSN_DELETED);
2400 start = get_last_insn ();
2403 /* If we don't have a default-label, create one here,
2404 after the body of the switch. */
2405 if (default_label_decl == 0)
2407 default_label_decl
2408 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
2409 expand_label (default_label_decl);
2411 default_label = label_rtx (default_label_decl);
2413 before_case = get_last_insn ();
2415 /* Get upper and lower bounds of case values.
2416 Also convert all the case values to the index expr's data type. */
/* COUNT becomes the number of comparisons needed (a range counts
   twice); UNIQ the number of distinct target labels.  */
2418 uniq = 0;
2419 count = 0;
2420 for (n = case_list; n; n = n->right)
2422 /* Count the elements and track the largest and smallest
2423 of them (treating them as signed even if they are not). */
2424 if (count++ == 0)
2426 minval = n->low;
2427 maxval = n->high;
2429 else
2431 if (INT_CST_LT (n->low, minval))
2432 minval = n->low;
2433 if (INT_CST_LT (maxval, n->high))
2434 maxval = n->high;
2436 /* A range counts double, since it requires two compares. */
2437 if (! tree_int_cst_equal (n->low, n->high))
2438 count++;
2440 /* Count the number of unique case node targets. */
2441 uniq++;
2442 lab = label_rtx (n->code_label);
2443 for (m = case_list; m != n; m = m->right)
2444 if (label_rtx (m->code_label) == lab)
2446 uniq--;
2447 break;
2451 /* Compute span of values. */
2452 if (count != 0)
2453 range = fold (build2 (MINUS_EXPR, index_type, maxval, minval));
2455 if (count == 0)
2457 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
2458 emit_jump (default_label);
2461 /* Try implementing this switch statement by a short sequence of
2462 bit-wise comparisons. However, we let the binary-tree case
2463 below handle constant index expressions. */
2464 else if (CASE_USE_BIT_TESTS
2465 && ! TREE_CONSTANT (index_expr)
2466 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
2467 && compare_tree_int (range, 0) > 0
2468 && lshift_cheap_p ()
2469 && ((uniq == 1 && count >= 3)
2470 || (uniq == 2 && count >= 5)
2471 || (uniq == 3 && count >= 6)))
2473 /* Optimize the case where all the case values fit in a
2474 word without having to subtract MINVAL. In this case,
2475 we can optimize away the subtraction. */
2476 if (compare_tree_int (minval, 0) > 0
2477 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
2479 minval = integer_zero_node;
2480 range = maxval;
2482 emit_case_bit_tests (index_type, index_expr, minval, range,
2483 case_list, default_label);
2486 /* If range of values is much bigger than number of values,
2487 make a sequence of conditional branches instead of a dispatch.
2488 If the switch-index is a constant, do it this way
2489 because we can optimize it. */
2491 else if (count < case_values_threshold ()
2492 || compare_tree_int (range,
2493 (optimize_size ? 3 : 10) * count) > 0
2494 /* RANGE may be signed, and really large ranges will show up
2495 as negative numbers. */
2496 || compare_tree_int (range, 0) < 0
2497 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
2498 || flag_pic
2499 #endif
2500 || TREE_CONSTANT (index_expr)
2501 /* If neither casesi or tablejump is available, we can
2502 only go this way. */
2503 || (!HAVE_casesi && !HAVE_tablejump))
2505 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
2507 /* If the index is a short or char that we do not have
2508 an insn to handle comparisons directly, convert it to
2509 a full integer now, rather than letting each comparison
2510 generate the conversion. */
2512 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
2513 && ! have_insn_for (COMPARE, GET_MODE (index)))
2515 enum machine_mode wider_mode;
2516 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
2517 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
2518 if (have_insn_for (COMPARE, wider_mode))
2520 index = convert_to_mode (wider_mode, index, unsignedp);
2521 break;
2525 do_pending_stack_adjust ();
2527 if (MEM_P (index))
2528 index = copy_to_reg (index);
2529 if (GET_CODE (index) == CONST_INT
2530 || TREE_CODE (index_expr) == INTEGER_CST)
2532 /* Make a tree node with the proper constant value
2533 if we don't already have one. */
2534 if (TREE_CODE (index_expr) != INTEGER_CST)
2536 index_expr
2537 = build_int_cst_wide (NULL_TREE, INTVAL (index),
2538 unsignedp || INTVAL (index) >= 0
2539 ? 0 : -1);
2540 index_expr = convert (index_type, index_expr);
2543 /* For constant index expressions we need only
2544 issue an unconditional branch to the appropriate
2545 target code. The job of removing any unreachable
2546 code is left to the optimization phase if the
2547 "-O" option is specified. */
2548 for (n = case_list; n; n = n->right)
2549 if (! tree_int_cst_lt (index_expr, n->low)
2550 && ! tree_int_cst_lt (n->high, index_expr))
2551 break;
2553 if (n)
2554 emit_jump (label_rtx (n->code_label));
2555 else
2556 emit_jump (default_label);
2558 else
2560 /* If the index expression is not constant we generate
2561 a binary decision tree to select the appropriate
2562 target code. This is done as follows:
2564 The list of cases is rearranged into a binary tree,
2565 nearly optimal assuming equal probability for each case.
2567 The tree is transformed into RTL, eliminating
2568 redundant test conditions at the same time.
2570 If program flow could reach the end of the
2571 decision tree an unconditional jump to the
2572 default code is emitted. */
2574 use_cost_table
2575 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
2576 && estimate_case_costs (case_list));
2577 balance_case_nodes (&case_list, NULL);
2578 emit_case_nodes (index, case_list, default_label, index_type);
2579 emit_jump (default_label);
2582 else
2584 table_label = gen_label_rtx ();
2585 if (! try_casesi (index_type, index_expr, minval, range,
2586 table_label, default_label))
2588 bool ok;
2589 index_type = integer_type_node;
2591 /* Index jumptables from zero for suitable values of
2592 minval to avoid a subtraction. */
2593 if (! optimize_size
2594 && compare_tree_int (minval, 0) > 0
2595 && compare_tree_int (minval, 3) < 0)
2597 minval = integer_zero_node;
2598 range = maxval;
2601 ok = try_tablejump (index_type, index_expr, minval, range,
2602 table_label, default_label);
2603 gcc_assert (ok);
2606 /* Get table of labels to jump to, in order of case index. */
2608 ncases = tree_low_cst (range, 0) + 1;
2609 labelvec = alloca (ncases * sizeof (rtx));
2610 memset (labelvec, 0, ncases * sizeof (rtx));
2612 for (n = case_list; n; n = n->right)
2614 /* Compute the low and high bounds relative to the minimum
2615 value since that should fit in a HOST_WIDE_INT while the
2616 actual values may not. */
2617 HOST_WIDE_INT i_low
2618 = tree_low_cst (fold (build2 (MINUS_EXPR, index_type,
2619 n->low, minval)), 1);
2620 HOST_WIDE_INT i_high
2621 = tree_low_cst (fold (build2 (MINUS_EXPR, index_type,
2622 n->high, minval)), 1);
2623 HOST_WIDE_INT i;
2625 for (i = i_low; i <= i_high; i ++)
2626 labelvec[i]
2627 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
2630 /* Fill in the gaps with the default. */
2631 for (i = 0; i < ncases; i++)
2632 if (labelvec[i] == 0)
2633 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
2635 /* Output the table. */
2636 emit_label (table_label);
2638 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
2639 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
2640 gen_rtx_LABEL_REF (Pmode, table_label),
2641 gen_rtvec_v (ncases, labelvec),
2642 const0_rtx, const0_rtx));
2643 else
2644 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
2645 gen_rtvec_v (ncases, labelvec)));
2647 /* If the case insn drops through the table,
2648 after the table we must jump to the default-label.
2649 Otherwise record no drop-through after the table. */
2650 #ifdef CASE_DROPS_THROUGH
2651 emit_jump (default_label);
2652 #else
2653 emit_barrier ();
2654 #endif
/* Move the dispatch code emitted since BEFORE_CASE so that it sits
   after START (the note reserved near the top of this function).  */
2657 before_case = NEXT_INSN (before_case);
2658 end = get_last_insn ();
2659 fail = squeeze_notes (&before_case, &end);
2660 gcc_assert (!fail);
2661 reorder_insns (before_case, end, start);
2664 free_temp_slots ();
2667 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
2669 static void
2670 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
2672 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
2674 if (op1 == op2)
2675 emit_jump (label);
2677 else
2678 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
2679 (GET_MODE (op1) == VOIDmode
2680 ? GET_MODE (op2) : GET_MODE (op1)),
2681 unsignedp, label);
2684 /* Not all case values are encountered equally. This function
2685 uses a heuristic to weight case labels, in cases where that
2686 looks like a reasonable thing to do.
2688 Right now, all we try to guess is text, and we establish the
2689 following weights:
2691 chars above space: 16
2692 digits: 16
2693 default: 12
2694 space, punct: 8
2695 tab: 4
2696 newline: 2
2697 other "\" chars: 1
2698 remaining chars: 0
2700 If we find any cases in the switch that are not either -1 or in the range
2701 of valid ASCII characters, or are control characters other than those
2702 commonly used with "\", don't treat this switch scanning text.
2704 Return 1 if these nodes are suitable for cost estimation, otherwise
2705 return 0. */
2707 static int
2708 estimate_case_costs (case_node_ptr node)
2710 tree min_ascii = integer_minus_one_node;
2711 tree max_ascii = convert (TREE_TYPE (node->high),
2712 build_int_cst (NULL_TREE, 127));
2713 case_node_ptr n;
2714 int i;
2716 /* If we haven't already made the cost table, make it now. Note that the
2717 lower bound of the table is -1, not zero. */
2719 if (! cost_table_initialized)
2721 cost_table_initialized = 1;
2723 for (i = 0; i < 128; i++)
2725 if (ISALNUM (i))
2726 COST_TABLE (i) = 16;
2727 else if (ISPUNCT (i))
2728 COST_TABLE (i) = 8;
2729 else if (ISCNTRL (i))
2730 COST_TABLE (i) = -1;
2733 COST_TABLE (' ') = 8;
2734 COST_TABLE ('\t') = 4;
2735 COST_TABLE ('\0') = 4;
2736 COST_TABLE ('\n') = 2;
2737 COST_TABLE ('\f') = 1;
2738 COST_TABLE ('\v') = 1;
2739 COST_TABLE ('\b') = 1;
2742 /* See if all the case expressions look like text. It is text if the
2743 constant is >= -1 and the highest constant is <= 127. Do all comparisons
2744 as signed arithmetic since we don't want to ever access cost_table with a
2745 value less than -1. Also check that none of the constants in a range
2746 are strange control characters. */
2748 for (n = node; n; n = n->right)
2750 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
2751 return 0;
2753 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
2754 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
2755 if (COST_TABLE (i) < 0)
2756 return 0;
2759 /* All interesting values are within the range of interesting
2760 ASCII characters. */
2761 return 1;
2764 /* Take an ordered list of case nodes
2765 and transform them into a near optimal binary tree,
2766 on the assumption that any target code selection value is as
2767 likely as any other.
2769 The transformation is performed by splitting the ordered
2770 list into two equal sections plus a pivot. The parts are
2771 then attached to the pivot as left and right branches. Each
2772 branch is then transformed recursively. */
/* NOTE(review): this dump has lost the original blank and brace-only
   lines; statement text below is unmodified.  */
2774 static void
2775 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
2777 case_node_ptr np;
2779 np = *head;
2780 if (np)
2782 int cost = 0;
2783 int i = 0;
2784 int ranges = 0;
2785 case_node_ptr *npp;
2786 case_node_ptr left;
2788 /* Count the number of entries on branch. Also count the ranges. */
2790 while (np)
2792 if (!tree_int_cst_equal (np->low, np->high))
2794 ranges++;
2795 if (use_cost_table)
2796 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
2799 if (use_cost_table)
2800 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
2802 i++;
2803 np = np->right;
2806 if (i > 2)
2808 /* Split this list if it is long enough for that to help. */
/* NPP walks the RIGHT link cells so the list can be cut at *NPP;
   LEFT remembers the original head, which becomes the left part.  */
2809 npp = head;
2810 left = *npp;
2811 if (use_cost_table)
2813 /* Find the place in the list that bisects the list's total cost,
2814 Here I gets half the total cost. */
2815 int n_moved = 0;
2816 i = (cost + 1) / 2;
2817 while (1)
2819 /* Skip nodes while their cost does not reach that amount. */
2820 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2821 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
2822 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
2823 if (i <= 0)
2824 break;
2825 npp = &(*npp)->right;
2826 n_moved += 1;
2828 if (n_moved == 0)
2830 /* Leave this branch lopsided, but optimize left-hand
2831 side and fill in `parent' fields for right-hand side. */
2832 np = *head;
2833 np->parent = parent;
2834 balance_case_nodes (&np->left, np);
2835 for (; np->right; np = np->right)
2836 np->right->parent = np;
2837 return;
2840 /* If there are just three nodes, split at the middle one. */
2841 else if (i == 3)
2842 npp = &(*npp)->right;
2843 else
2845 /* Find the place in the list that bisects the list's total cost,
2846 where ranges count as 2.
2847 Here I gets half the total cost. */
2848 i = (i + ranges + 1) / 2;
2849 while (1)
2851 /* Skip nodes while their cost does not reach that amount. */
2852 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2853 i--;
2854 i--;
2855 if (i <= 0)
2856 break;
2857 npp = &(*npp)->right;
/* Detach the pivot *NPP: it becomes the new subtree root, with LEFT
   (the first half) as its left branch and its old tail as the right.  */
2860 *head = np = *npp;
2861 *npp = 0;
2862 np->parent = parent;
2863 np->left = left;
2865 /* Optimize each of the two split parts. */
2866 balance_case_nodes (&np->left, np);
2867 balance_case_nodes (&np->right, np);
2869 else
2871 /* Else leave this branch as one level,
2872 but fill in `parent' fields. */
2873 np = *head;
2874 np->parent = parent;
2875 for (; np->right; np = np->right)
2876 np->right->parent = np;
2881 /* Search the parent sections of the case node tree
2882 to see if a test for the lower bound of NODE would be redundant.
2883 INDEX_TYPE is the type of the index expression.
2885 The instructions to generate the case decision tree are
2886 output in the same order as nodes are processed so it is
2887 known that if a parent node checks the range of the current
2888 node minus one that the current node is bounded at its lower
2889 span. Thus the test would be redundant. */
2891 static int
2892 node_has_low_bound (case_node_ptr node, tree index_type)
2894 tree low_minus_one;
2895 case_node_ptr pnode;
2897 /* If the lower bound of this node is the lowest value in the index type,
2898 we need not test it. */
2900 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
2901 return 1;
2903 /* If this node has a left branch, the value at the left must be less
2904 than that at this node, so it cannot be bounded at the bottom and
2905 we need not bother testing any further. */
2907 if (node->left)
2908 return 0;
2910 low_minus_one = fold (build2 (MINUS_EXPR, TREE_TYPE (node->low),
2911 node->low, integer_one_node));
2913 /* If the subtraction above overflowed, we can't verify anything.
2914 Otherwise, look for a parent that tests our value - 1. */
2916 if (! tree_int_cst_lt (low_minus_one, node->low))
2917 return 0;
2919 for (pnode = node->parent; pnode; pnode = pnode->parent)
2920 if (tree_int_cst_equal (low_minus_one, pnode->high))
2921 return 1;
2923 return 0;
2926 /* Search the parent sections of the case node tree
2927 to see if a test for the upper bound of NODE would be redundant.
2928 INDEX_TYPE is the type of the index expression.
2930 The instructions to generate the case decision tree are
2931 output in the same order as nodes are processed so it is
2932 known that if a parent node checks the range of the current
2933 node plus one that the current node is bounded at its upper
2934 span. Thus the test would be redundant. */
2936 static int
2937 node_has_high_bound (case_node_ptr node, tree index_type)
2939 tree high_plus_one;
2940 case_node_ptr pnode;
2942 /* If there is no upper bound, obviously no test is needed. */
2944 if (TYPE_MAX_VALUE (index_type) == NULL)
2945 return 1;
2947 /* If the upper bound of this node is the highest value in the type
2948 of the index expression, we need not test against it. */
2950 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
2951 return 1;
2953 /* If this node has a right branch, the value at the right must be greater
2954 than that at this node, so it cannot be bounded at the top and
2955 we need not bother testing any further. */
2957 if (node->right)
2958 return 0;
2960 high_plus_one = fold (build2 (PLUS_EXPR, TREE_TYPE (node->high),
2961 node->high, integer_one_node));
2963 /* If the addition above overflowed, we can't verify anything.
2964 Otherwise, look for a parent that tests our value + 1. */
2966 if (! tree_int_cst_lt (node->high, high_plus_one))
2967 return 0;
2969 for (pnode = node->parent; pnode; pnode = pnode->parent)
2970 if (tree_int_cst_equal (high_plus_one, pnode->low))
2971 return 1;
2973 return 0;
2976 /* Search the parent sections of the
2977 case node tree to see if both tests for the upper and lower
2978 bounds of NODE would be redundant. */
2980 static int
2981 node_is_bounded (case_node_ptr node, tree index_type)
2983 return (node_has_low_bound (node, index_type)
2984 && node_has_high_bound (node, index_type));
2987 /* Emit step-by-step code to select a case for the value of INDEX.
2988 The thus generated decision tree follows the form of the
2989 case-node binary tree NODE, whose nodes represent test conditions.
2990 INDEX_TYPE is the type of the index of the switch.
2992 Care is taken to prune redundant tests from the decision tree
2993 by detecting any boundary conditions already checked by
2994 emitted rtx. (See node_has_high_bound, node_has_low_bound
2995 and node_is_bounded, above.)
2997 Where the test conditions can be shown to be redundant we emit
2998 an unconditional jump to the target code. As a further
2999 optimization, the subordinates of a tree node are examined to
3000 check for bounded nodes. In this case conditional and/or
3001 unconditional jumps as a result of the boundary check for the
3002 current node are arranged to target the subordinates associated
3003 code for out of bound conditions on the current node.
3005 We can assume that when control reaches the code generated here,
3006 the index value has already been compared with the parents
3007 of this node, and determined to be on the same side of each parent
3008 as this node is. Thus, if this node tests for the value 51,
3009 and a parent tested for 52, we don't need to consider
3010 the possibility of a value greater than 51. If another parent
3011 tests for the value 50, then this node need not test anything. */
3013 static void
3014 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
3015 tree index_type)
3017 /* If INDEX has an unsigned type, we must make unsigned branches. */
3018 int unsignedp = TYPE_UNSIGNED (index_type);
3019 enum machine_mode mode = GET_MODE (index);
3020 enum machine_mode imode = TYPE_MODE (index_type);
3022 /* See if our parents have already tested everything for us.
3023 If they have, emit an unconditional jump for this node. */
3024 if (node_is_bounded (node, index_type))
3025 emit_jump (label_rtx (node->code_label));
3027 else if (tree_int_cst_equal (node->low, node->high))
3029 /* Node is single valued. First see if the index expression matches
3030 this node and then check our children, if any. */
3032 do_jump_if_equal (index,
3033 convert_modes (mode, imode,
3034 expand_expr (node->low, NULL_RTX,
3035 VOIDmode, 0),
3036 unsignedp),
3037 label_rtx (node->code_label), unsignedp);
3039 if (node->right != 0 && node->left != 0)
3041 /* This node has children on both sides.
3042 Dispatch to one side or the other
3043 by comparing the index value with this node's value.
3044 If one subtree is bounded, check that one first,
3045 so we can avoid real branches in the tree. */
3047 if (node_is_bounded (node->right, index_type))
3049 emit_cmp_and_jump_insns (index,
3050 convert_modes
3051 (mode, imode,
3052 expand_expr (node->high, NULL_RTX,
3053 VOIDmode, 0),
3054 unsignedp),
3055 GT, NULL_RTX, mode, unsignedp,
3056 label_rtx (node->right->code_label));
3057 emit_case_nodes (index, node->left, default_label, index_type);
3060 else if (node_is_bounded (node->left, index_type))
3062 emit_cmp_and_jump_insns (index,
3063 convert_modes
3064 (mode, imode,
3065 expand_expr (node->high, NULL_RTX,
3066 VOIDmode, 0),
3067 unsignedp),
3068 LT, NULL_RTX, mode, unsignedp,
3069 label_rtx (node->left->code_label));
3070 emit_case_nodes (index, node->right, default_label, index_type);
3073 /* If both children are single-valued cases with no
3074 children, finish up all the work. This way, we can save
3075 one ordered comparison. */
3076 else if (tree_int_cst_equal (node->right->low, node->right->high)
3077 && node->right->left == 0
3078 && node->right->right == 0
3079 && tree_int_cst_equal (node->left->low, node->left->high)
3080 && node->left->left == 0
3081 && node->left->right == 0)
3083 /* Neither node is bounded. First distinguish the two sides;
3084 then emit the code for one side at a time. */
3086 /* See if the value matches what the right hand side
3087 wants. */
3088 do_jump_if_equal (index,
3089 convert_modes (mode, imode,
3090 expand_expr (node->right->low,
3091 NULL_RTX,
3092 VOIDmode, 0),
3093 unsignedp),
3094 label_rtx (node->right->code_label),
3095 unsignedp);
3097 /* See if the value matches what the left hand side
3098 wants. */
3099 do_jump_if_equal (index,
3100 convert_modes (mode, imode,
3101 expand_expr (node->left->low,
3102 NULL_RTX,
3103 VOIDmode, 0),
3104 unsignedp),
3105 label_rtx (node->left->code_label),
3106 unsignedp);
3109 else
3111 /* Neither node is bounded. First distinguish the two sides;
3112 then emit the code for one side at a time. */
3114 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3116 /* See if the value is on the right. */
3117 emit_cmp_and_jump_insns (index,
3118 convert_modes
3119 (mode, imode,
3120 expand_expr (node->high, NULL_RTX,
3121 VOIDmode, 0),
3122 unsignedp),
3123 GT, NULL_RTX, mode, unsignedp,
3124 label_rtx (test_label));
3126 /* Value must be on the left.
3127 Handle the left-hand subtree. */
3128 emit_case_nodes (index, node->left, default_label, index_type);
3129 /* If left-hand subtree does nothing,
3130 go to default. */
3131 emit_jump (default_label);
3133 /* Code branches here for the right-hand subtree. */
3134 expand_label (test_label);
3135 emit_case_nodes (index, node->right, default_label, index_type);
3139 else if (node->right != 0 && node->left == 0)
3141 /* Here we have a right child but no left so we issue conditional
3142 branch to default and process the right child.
3144 Omit the conditional branch to default if we it avoid only one
3145 right child; it costs too much space to save so little time. */
3147 if (node->right->right || node->right->left
3148 || !tree_int_cst_equal (node->right->low, node->right->high))
3150 if (!node_has_low_bound (node, index_type))
3152 emit_cmp_and_jump_insns (index,
3153 convert_modes
3154 (mode, imode,
3155 expand_expr (node->high, NULL_RTX,
3156 VOIDmode, 0),
3157 unsignedp),
3158 LT, NULL_RTX, mode, unsignedp,
3159 default_label);
3162 emit_case_nodes (index, node->right, default_label, index_type);
3164 else
3165 /* We cannot process node->right normally
3166 since we haven't ruled out the numbers less than
3167 this node's value. So handle node->right explicitly. */
3168 do_jump_if_equal (index,
3169 convert_modes
3170 (mode, imode,
3171 expand_expr (node->right->low, NULL_RTX,
3172 VOIDmode, 0),
3173 unsignedp),
3174 label_rtx (node->right->code_label), unsignedp);
3177 else if (node->right == 0 && node->left != 0)
3179 /* Just one subtree, on the left. */
3180 if (node->left->left || node->left->right
3181 || !tree_int_cst_equal (node->left->low, node->left->high))
3183 if (!node_has_high_bound (node, index_type))
3185 emit_cmp_and_jump_insns (index,
3186 convert_modes
3187 (mode, imode,
3188 expand_expr (node->high, NULL_RTX,
3189 VOIDmode, 0),
3190 unsignedp),
3191 GT, NULL_RTX, mode, unsignedp,
3192 default_label);
3195 emit_case_nodes (index, node->left, default_label, index_type);
3197 else
3198 /* We cannot process node->left normally
3199 since we haven't ruled out the numbers less than
3200 this node's value. So handle node->left explicitly. */
3201 do_jump_if_equal (index,
3202 convert_modes
3203 (mode, imode,
3204 expand_expr (node->left->low, NULL_RTX,
3205 VOIDmode, 0),
3206 unsignedp),
3207 label_rtx (node->left->code_label), unsignedp);
3210 else
3212 /* Node is a range. These cases are very similar to those for a single
3213 value, except that we do not start by testing whether this node
3214 is the one to branch to. */
3216 if (node->right != 0 && node->left != 0)
3218 /* Node has subtrees on both sides.
3219 If the right-hand subtree is bounded,
3220 test for it first, since we can go straight there.
3221 Otherwise, we need to make a branch in the control structure,
3222 then handle the two subtrees. */
3223 tree test_label = 0;
3225 if (node_is_bounded (node->right, index_type))
3226 /* Right hand node is fully bounded so we can eliminate any
3227 testing and branch directly to the target code. */
3228 emit_cmp_and_jump_insns (index,
3229 convert_modes
3230 (mode, imode,
3231 expand_expr (node->high, NULL_RTX,
3232 VOIDmode, 0),
3233 unsignedp),
3234 GT, NULL_RTX, mode, unsignedp,
3235 label_rtx (node->right->code_label));
3236 else
3238 /* Right hand node requires testing.
3239 Branch to a label where we will handle it later. */
3241 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3242 emit_cmp_and_jump_insns (index,
3243 convert_modes
3244 (mode, imode,
3245 expand_expr (node->high, NULL_RTX,
3246 VOIDmode, 0),
3247 unsignedp),
3248 GT, NULL_RTX, mode, unsignedp,
3249 label_rtx (test_label));
3252 /* Value belongs to this node or to the left-hand subtree. */
3254 emit_cmp_and_jump_insns (index,
3255 convert_modes
3256 (mode, imode,
3257 expand_expr (node->low, NULL_RTX,
3258 VOIDmode, 0),
3259 unsignedp),
3260 GE, NULL_RTX, mode, unsignedp,
3261 label_rtx (node->code_label));
3263 /* Handle the left-hand subtree. */
3264 emit_case_nodes (index, node->left, default_label, index_type);
3266 /* If right node had to be handled later, do that now. */
3268 if (test_label)
3270 /* If the left-hand subtree fell through,
3271 don't let it fall into the right-hand subtree. */
3272 emit_jump (default_label);
3274 expand_label (test_label);
3275 emit_case_nodes (index, node->right, default_label, index_type);
3279 else if (node->right != 0 && node->left == 0)
3281 /* Deal with values to the left of this node,
3282 if they are possible. */
3283 if (!node_has_low_bound (node, index_type))
3285 emit_cmp_and_jump_insns (index,
3286 convert_modes
3287 (mode, imode,
3288 expand_expr (node->low, NULL_RTX,
3289 VOIDmode, 0),
3290 unsignedp),
3291 LT, NULL_RTX, mode, unsignedp,
3292 default_label);
3295 /* Value belongs to this node or to the right-hand subtree. */
3297 emit_cmp_and_jump_insns (index,
3298 convert_modes
3299 (mode, imode,
3300 expand_expr (node->high, NULL_RTX,
3301 VOIDmode, 0),
3302 unsignedp),
3303 LE, NULL_RTX, mode, unsignedp,
3304 label_rtx (node->code_label));
3306 emit_case_nodes (index, node->right, default_label, index_type);
3309 else if (node->right == 0 && node->left != 0)
3311 /* Deal with values to the right of this node,
3312 if they are possible. */
3313 if (!node_has_high_bound (node, index_type))
3315 emit_cmp_and_jump_insns (index,
3316 convert_modes
3317 (mode, imode,
3318 expand_expr (node->high, NULL_RTX,
3319 VOIDmode, 0),
3320 unsignedp),
3321 GT, NULL_RTX, mode, unsignedp,
3322 default_label);
3325 /* Value belongs to this node or to the left-hand subtree. */
3327 emit_cmp_and_jump_insns (index,
3328 convert_modes
3329 (mode, imode,
3330 expand_expr (node->low, NULL_RTX,
3331 VOIDmode, 0),
3332 unsignedp),
3333 GE, NULL_RTX, mode, unsignedp,
3334 label_rtx (node->code_label));
3336 emit_case_nodes (index, node->left, default_label, index_type);
3339 else
3341 /* Node has no children so we check low and high bounds to remove
3342 redundant tests. Only one of the bounds can exist,
3343 since otherwise this node is bounded--a case tested already. */
3344 int high_bound = node_has_high_bound (node, index_type);
3345 int low_bound = node_has_low_bound (node, index_type);
3347 if (!high_bound && low_bound)
3349 emit_cmp_and_jump_insns (index,
3350 convert_modes
3351 (mode, imode,
3352 expand_expr (node->high, NULL_RTX,
3353 VOIDmode, 0),
3354 unsignedp),
3355 GT, NULL_RTX, mode, unsignedp,
3356 default_label);
3359 else if (!low_bound && high_bound)
3361 emit_cmp_and_jump_insns (index,
3362 convert_modes
3363 (mode, imode,
3364 expand_expr (node->low, NULL_RTX,
3365 VOIDmode, 0),
3366 unsignedp),
3367 LT, NULL_RTX, mode, unsignedp,
3368 default_label);
3370 else if (!low_bound && !high_bound)
3372 /* Widen LOW and HIGH to the same width as INDEX. */
3373 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
3374 tree low = build1 (CONVERT_EXPR, type, node->low);
3375 tree high = build1 (CONVERT_EXPR, type, node->high);
3376 rtx low_rtx, new_index, new_bound;
3378 /* Instead of doing two branches, emit one unsigned branch for
3379 (index-low) > (high-low). */
3380 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
3381 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
3382 NULL_RTX, unsignedp,
3383 OPTAB_WIDEN);
3384 new_bound = expand_expr (fold (build2 (MINUS_EXPR, type,
3385 high, low)),
3386 NULL_RTX, mode, 0);
3388 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
3389 mode, 1, default_label);
3392 emit_jump (label_rtx (node->code_label));