1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
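/* Illustrative sketch, not from the original sources: for a statement
   `if (c) body;' a front end would roughly call

     expand_start_cond (c, 0);    -- emit the test and conditional jump
     ... expand `body' ...
     expand_end_cond ();          -- define the label reached when the
                                     condition is false

   using the nesting machinery declared below.  */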
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "tm_p.h"
44 #include "flags.h"
45 #include "except.h"
46 #include "function.h"
47 #include "insn-config.h"
48 #include "expr.h"
49 #include "libfuncs.h"
50 #include "hard-reg-set.h"
51 #include "loop.h"
52 #include "recog.h"
53 #include "machmode.h"
54 #include "toplev.h"
55 #include "output.h"
56 #include "ggc.h"
57 #include "langhooks.h"
58 #include "predict.h"
59 #include "optabs.h"
60 #include "target.h"
61 #include "regs.h"
63 /* Functions and data structures for expanding case statements. */
65 /* Case label structure, used to hold info on labels within case
66 statements. We handle "range" labels; for a single-value label
67 as in C, the high and low limits are the same.
69 An AVL tree of case nodes is initially created, and later transformed
70 to a list linked via the RIGHT fields in the nodes. Nodes with
71 higher case values are later in the list.
73 Switch statements can be output in one of two forms. A branch table
74 is used if there are more than a few labels and the labels are dense
75 within the range between the smallest and largest case value. If a
76 branch table is used, no further manipulations are done with the case
77 node chain.
79 The alternative to the use of a branch table is to generate a series
80 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
81 and PARENT fields to hold a binary tree. Initially the tree is
82 totally unbalanced, with everything on the right. We balance the tree
83 with nodes on the left having lower case values than the parent
84 and nodes on the right having higher values. We then output the tree
85 in order. */
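/* Illustrative example, not from the original sources: for

     switch (x)
       {
       case 1:        ...     -- one node, low = high = 1
       case 4 ... 6:  ...     -- one node, low = 4, high = 6 (GNU range)
       default:       ...
       }

   two case_node entries are built.  With labels this sparse a branch
   table is not worthwhile, so they would be emitted as compare-and-jump
   insns arranged as the balanced tree described above.  */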
87 struct case_node GTY(())
89 struct case_node *left; /* Left son in binary tree */
90 struct case_node *right; /* Right son in binary tree; also node chain */
91 struct case_node *parent; /* Parent of node in binary tree */
92 tree low; /* Lowest index value for this label */
93 tree high; /* Highest index value for this label */
94 tree code_label; /* Label to jump to when node matches */
95 int balance;
98 typedef struct case_node case_node;
99 typedef struct case_node *case_node_ptr;
101 /* These are used by estimate_case_costs and balance_case_nodes. */
103 /* This must be a signed type, and non-ANSI compilers lack signed char. */
104 static short cost_table_[129];
105 static int use_cost_table;
106 static int cost_table_initialized;
108 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
109 is unsigned. */
110 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
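/* For example, COST_TABLE (-1) expands to cost_table_[0] and
   COST_TABLE (127) to cost_table_[128], so the 129 entries cover the
   range -1 .. 127 without ever using a negative array index.  */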
112 /* Stack of control and binding constructs we are currently inside.
114 These constructs begin when you call `expand_start_WHATEVER'
115 and end when you call `expand_end_WHATEVER'. This stack records
116 info about how the construct began that tells the end-function
117 what to do. It also may provide information about the construct
118 to alter the behavior of other constructs within the body.
119 For example, they may affect the behavior of C `break' and `continue'.
121 Each construct gets one `struct nesting' object.
122 All of these objects are chained through the `all' field.
123 `nesting_stack' points to the first object (innermost construct).
124 The position of an entry on `nesting_stack' is in its `depth' field.
126 Each type of construct has its own individual stack.
 127 For example, conds have `cond_stack'. Each object points to the
128 next object of the same type through the `next' field.
130 Some constructs are visible to `break' exit-statements and others
131 are not. Which constructs are visible depends on the language.
132 Therefore, the data structure allows each construct to be visible
133 or not, according to the args given when the construct is started.
134 The construct is visible if the `exit_label' field is non-null.
135 In that case, the value should be a CODE_LABEL rtx. */
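/* Illustrative example, not from the original sources: while expanding

     {                        -- binding contour: goes on block_stack
       if (p)                 -- conditional:     goes on cond_stack
         switch (q)           -- case statement:  goes on case_stack
           { ... }
     }

   three `struct nesting' objects are live at the innermost point.  All
   of them are chained through `all' on nesting_stack, innermost first,
   and each is also the head of its own per-type stack.  */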
137 struct nesting GTY(())
139 struct nesting *all;
140 struct nesting *next;
141 int depth;
142 rtx exit_label;
143 enum nesting_desc {
144 COND_NESTING,
145 BLOCK_NESTING,
146 CASE_NESTING
147 } desc;
148 union nesting_u
150 /* For conds (if-then and if-then-else statements). */
151 struct nesting_cond
153 /* Label for the end of the if construct.
154 There is none if EXITFLAG was not set
155 and no `else' has been seen yet. */
156 rtx endif_label;
157 /* Label for the end of this alternative.
158 This may be the end of the if or the next else/elseif. */
159 rtx next_label;
160 } GTY ((tag ("COND_NESTING"))) cond;
161 /* For variable binding contours. */
162 struct nesting_block
164 /* Sequence number of this binding contour within the function,
165 in order of entry. */
166 int block_start_count;
167 /* Nonzero => value to restore stack to on exit. */
168 rtx stack_level;
169 /* The NOTE that starts this contour.
170 Used by expand_goto to check whether the destination
171 is within each contour or not. */
172 rtx first_insn;
173 /* Innermost containing binding contour that has a stack level. */
174 struct nesting *innermost_stack_block;
175 /* List of cleanups to be run on exit from this contour.
176 This is a list of expressions to be evaluated.
177 The TREE_PURPOSE of each link is the ..._DECL node
178 which the cleanup pertains to. */
179 tree cleanups;
180 /* List of cleanup-lists of blocks containing this block,
181 as they were at the locus where this block appears.
182 There is an element for each containing block,
183 ordered innermost containing block first.
184 The tail of this list can be 0,
185 if all remaining elements would be empty lists.
186 The element's TREE_VALUE is the cleanup-list of that block,
187 which may be null. */
188 tree outer_cleanups;
189 /* Chain of labels defined inside this binding contour.
190 For contours that have stack levels or cleanups. */
191 struct label_chain *label_chain;
192 /* Nonzero if this is associated with an EH region. */
193 int exception_region;
194 /* The saved target_temp_slot_level from our outer block.
195 We may reset target_temp_slot_level to be the level of
196 this block, if that is done, target_temp_slot_level
197 reverts to the saved target_temp_slot_level at the very
198 end of the block. */
199 int block_target_temp_slot_level;
200 /* True if we are currently emitting insns in an area of
201 output code that is controlled by a conditional
202 expression. This is used by the cleanup handling code to
203 generate conditional cleanup actions. */
204 int conditional_code;
205 /* A place to move the start of the exception region for any
206 of the conditional cleanups, must be at the end or after
207 the start of the last unconditional cleanup, and before any
208 conditional branch points. */
209 rtx last_unconditional_cleanup;
210 } GTY ((tag ("BLOCK_NESTING"))) block;
211 /* For switch (C) or case (Pascal) statements. */
212 struct nesting_case
214 /* The insn after which the case dispatch should finally
215 be emitted. Zero for a dummy. */
216 rtx start;
217 /* A list of case labels; it is first built as an AVL tree.
218 During expand_end_case, this is converted to a list, and may be
219 rearranged into a nearly balanced binary tree. */
220 struct case_node *case_list;
221 /* Label to jump to if no case matches. */
222 tree default_label;
223 /* The expression to be dispatched on. */
224 tree index_expr;
225 /* Type that INDEX_EXPR should be converted to. */
226 tree nominal_type;
227 /* Name of this kind of statement, for warnings. */
228 const char *printname;
229 /* Used to save no_line_numbers till we see the first case label.
230 We set this to -1 when we see the first case label in this
231 case statement. */
232 int line_number_status;
233 } GTY ((tag ("CASE_NESTING"))) case_stmt;
234 } GTY ((desc ("%1.desc"))) data;
237 /* Allocate and return a new `struct nesting'. */
239 #define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))
241 /* Pop the nesting stack element by element until we pop off
242 the element which is at the top of STACK.
243 Update all the other stacks, popping off elements from them
244 as we pop them from nesting_stack. */
246 #define POPSTACK(STACK) \
247 do { struct nesting *target = STACK; \
248 struct nesting *this; \
249 do { this = nesting_stack; \
250 if (cond_stack == this) \
251 cond_stack = cond_stack->next; \
252 if (block_stack == this) \
253 block_stack = block_stack->next; \
254 if (stack_block_stack == this) \
255 stack_block_stack = stack_block_stack->next; \
256 if (case_stack == this) \
257 case_stack = case_stack->next; \
258 nesting_depth = nesting_stack->depth - 1; \
259 nesting_stack = this->all; } \
260 while (this != target); } while (0)
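/* For example, when a case statement ends, the expander can use
   POPSTACK (case_stack);  this repeatedly pops nesting_stack, removing
   each popped object from whichever per-type stacks it heads, until the
   innermost case construct itself has been popped.  */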
262 /* In some cases it is impossible to generate code for a forward goto
263 until the label definition is seen. This happens when it may be necessary
264 for the goto to reset the stack pointer: we don't yet know how to do that.
265 So expand_goto puts an entry on this fixup list.
266 Each time a binding contour that resets the stack is exited,
267 we check each fixup.
268 If the target label has now been defined, we can insert the proper code. */
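/* Illustrative example, not from the original sources:

     {
       char buf[n];           -- variable-sized: this contour changes
       ...                       the stack level
       goto out;              -- `out' is not defined yet
     }
   out: ;

   When the goto is expanded, how much stack to restore is not yet
   known, so expand_goto records a fixup; once the contour containing
   `buf' is exited (or the label is defined), the stack restore and any
   cleanups are inserted just before the jump.  */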
270 struct goto_fixup GTY(())
272 /* Points to following fixup. */
273 struct goto_fixup *next;
274 /* Points to the insn before the jump insn.
275 If more code must be inserted, it goes after this insn. */
276 rtx before_jump;
277 /* The LABEL_DECL that this jump is jumping to, or 0
278 for break, continue or return. */
279 tree target;
280 /* The BLOCK for the place where this goto was found. */
281 tree context;
282 /* The CODE_LABEL rtx that this is jumping to. */
283 rtx target_rtl;
284 /* Number of binding contours started in current function
285 before the label reference. */
286 int block_start_count;
287 /* The outermost stack level that should be restored for this jump.
288 Each time a binding contour that resets the stack is exited,
289 if the target label is *not* yet defined, this slot is updated. */
290 rtx stack_level;
291 /* List of lists of cleanup expressions to be run by this goto.
292 There is one element for each block that this goto is within.
293 The tail of this list can be 0,
294 if all remaining elements would be empty.
295 The TREE_VALUE contains the cleanup list of that block as of the
296 time this goto was seen.
297 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
298 tree cleanup_list_list;
301 /* Within any binding contour that must restore a stack level,
302 all labels are recorded with a chain of these structures. */
304 struct label_chain GTY(())
306 /* Points to following fixup. */
307 struct label_chain *next;
308 tree label;
311 struct stmt_status GTY(())
313 /* Chain of all pending binding contours. */
314 struct nesting * x_block_stack;
316 /* If any new stacks are added here, add them to POPSTACKS too. */
318 /* Chain of all pending binding contours that restore stack levels
319 or have cleanups. */
320 struct nesting * x_stack_block_stack;
322 /* Chain of all pending conditional statements. */
323 struct nesting * x_cond_stack;
325 /* Chain of all pending case or switch statements. */
326 struct nesting * x_case_stack;
328 /* Separate chain including all of the above,
329 chained through the `all' field. */
330 struct nesting * x_nesting_stack;
332 /* Number of entries on nesting_stack now. */
333 int x_nesting_depth;
335 /* Number of binding contours started so far in this function. */
336 int x_block_start_count;
338 /* Location of last line-number note, whether we actually
339 emitted it or not. */
340 location_t x_emit_locus;
342 struct goto_fixup *x_goto_fixup_chain;
345 #define block_stack (cfun->stmt->x_block_stack)
346 #define stack_block_stack (cfun->stmt->x_stack_block_stack)
347 #define cond_stack (cfun->stmt->x_cond_stack)
348 #define case_stack (cfun->stmt->x_case_stack)
349 #define nesting_stack (cfun->stmt->x_nesting_stack)
350 #define nesting_depth (cfun->stmt->x_nesting_depth)
351 #define current_block_start_count (cfun->stmt->x_block_start_count)
352 #define emit_locus (cfun->stmt->x_emit_locus)
353 #define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
355 /* Nonzero if we are using EH to handle cleanups. */
356 int using_eh_for_cleanups_p = 0;
358 static int n_occurrences (int, const char *);
359 static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
360 static void expand_goto_internal (tree, rtx, rtx);
361 static int expand_fixup (tree, rtx, rtx);
362 static void expand_nl_goto_receiver (void);
363 static void fixup_gotos (struct nesting *, rtx, tree, rtx, int);
364 static bool check_operand_nalternatives (tree, tree);
365 static bool check_unique_operand_names (tree, tree);
366 static char *resolve_operand_name_1 (char *, tree, tree);
367 static void expand_null_return_1 (rtx);
368 static enum br_predictor return_prediction (rtx);
369 static rtx shift_return_value (rtx);
370 static void expand_value_return (rtx);
371 static void expand_cleanups (tree, int, int);
372 static void do_jump_if_equal (rtx, rtx, rtx, int);
373 static int estimate_case_costs (case_node_ptr);
374 static bool same_case_target_p (rtx, rtx);
375 static void strip_default_case_nodes (case_node_ptr *, rtx);
376 static bool lshift_cheap_p (void);
377 static int case_bit_test_cmp (const void *, const void *);
378 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
379 static void group_case_nodes (case_node_ptr);
380 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
381 static int node_has_low_bound (case_node_ptr, tree);
382 static int node_has_high_bound (case_node_ptr, tree);
383 static int node_is_bounded (case_node_ptr, tree);
384 static void emit_jump_if_reachable (rtx);
385 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
386 static struct case_node *case_tree2list (case_node *, case_node *);
388 void
389 using_eh_for_cleanups (void)
391 using_eh_for_cleanups_p = 1;
394 void
395 init_stmt_for_function (void)
397 cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
400 /* Record the current file and line. Called from emit_line_note. */
402 void
403 set_file_and_line_for_stmt (location_t location)
405 /* If we're outputting an inline function, and we add a line note,
406 there may be no CFUN->STMT information. So, there's no need to
407 update it. */
408 if (cfun->stmt)
409 emit_locus = location;
412 /* Emit a no-op instruction. */
414 void
415 emit_nop (void)
417 rtx last_insn;
419 last_insn = get_last_insn ();
420 if (!optimize
421 && (GET_CODE (last_insn) == CODE_LABEL
422 || (GET_CODE (last_insn) == NOTE
423 && prev_real_insn (last_insn) == 0)))
424 emit_insn (gen_nop ());
427 /* Return the rtx-label that corresponds to a LABEL_DECL,
428 creating it if necessary. */
431 label_rtx (tree label)
433 if (TREE_CODE (label) != LABEL_DECL)
434 abort ();
436 if (!DECL_RTL_SET_P (label))
438 rtx r = gen_label_rtx ();
439 SET_DECL_RTL (label, r);
440 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
441 LABEL_PRESERVE_P (r) = 1;
444 return DECL_RTL (label);
447 /* As above, but also put it on the forced-reference list of the
448 function that contains it. */
450 force_label_rtx (tree label)
452 rtx ref = label_rtx (label);
453 tree function = decl_function_context (label);
454 struct function *p;
456 if (!function)
457 abort ();
459 if (function != current_function_decl)
460 p = find_function_data (function);
461 else
462 p = cfun;
464 p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
465 p->expr->x_forced_labels);
466 return ref;
469 /* Add an unconditional jump to LABEL as the next sequential instruction. */
471 void
472 emit_jump (rtx label)
474 do_pending_stack_adjust ();
475 emit_jump_insn (gen_jump (label));
476 emit_barrier ();
479 /* Emit code to jump to the address
480 specified by the pointer expression EXP. */
482 void
483 expand_computed_goto (tree exp)
485 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
487 x = convert_memory_address (Pmode, x);
489 emit_queue ();
490 do_pending_stack_adjust ();
491 emit_indirect_jump (x);
494 /* Handle goto statements and the labels that they can go to. */
496 /* Specify the location in the RTL code of a label LABEL,
497 which is a LABEL_DECL tree node.
499 This is used for the kind of label that the user can jump to with a
500 goto statement, and for alternatives of a switch or case statement.
501 RTL labels generated for loops and conditionals don't go through here;
502 they are generated directly at the RTL level, by other functions below.
504 Note that this has nothing to do with defining label *names*.
505 Languages vary in how they do that and what that even means. */
507 void
508 expand_label (tree label)
510 struct label_chain *p;
511 rtx label_r = label_rtx (label);
513 do_pending_stack_adjust ();
514 emit_label (label_r);
515 if (DECL_NAME (label))
516 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
518 if (DECL_NONLOCAL (label))
520 expand_nl_goto_receiver ();
521 nonlocal_goto_handler_labels
522 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
523 nonlocal_goto_handler_labels);
526 if (FORCED_LABEL (label))
527 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
529 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
530 maybe_set_first_label_num (label_r);
532 if (stack_block_stack != 0)
534 p = ggc_alloc (sizeof (struct label_chain));
535 p->next = stack_block_stack->data.block.label_chain;
536 stack_block_stack->data.block.label_chain = p;
537 p->label = label;
541 /* Generate RTL code for a `goto' statement with target label LABEL.
542 LABEL should be a LABEL_DECL tree node that was or will later be
543 defined with `expand_label'. */
545 void
546 expand_goto (tree label)
548 #ifdef ENABLE_CHECKING
549 /* Check for a nonlocal goto to a containing function. Should have
550 gotten translated to __builtin_nonlocal_goto. */
551 tree context = decl_function_context (label);
552 if (context != 0 && context != current_function_decl)
553 abort ();
554 #endif
556 expand_goto_internal (label, label_rtx (label), NULL_RTX);
559 /* Generate RTL code for a `goto' statement with target label BODY.
560 LABEL should be a LABEL_REF.
561 LAST_INSN, if non-0, is the rtx we should consider as the last
562 insn emitted (for the purposes of cleaning up a return). */
564 static void
565 expand_goto_internal (tree body, rtx label, rtx last_insn)
567 struct nesting *block;
568 rtx stack_level = 0;
570 if (GET_CODE (label) != CODE_LABEL)
571 abort ();
573 /* If label has already been defined, we can tell now
574 whether and how we must alter the stack level. */
576 if (PREV_INSN (label) != 0)
578 /* Find the innermost pending block that contains the label.
579 (Check containment by comparing insn-uids.)
580 Then restore the outermost stack level within that block,
581 and do cleanups of all blocks contained in it. */
582 for (block = block_stack; block; block = block->next)
584 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
585 break;
586 if (block->data.block.stack_level != 0)
587 stack_level = block->data.block.stack_level;
588 /* Execute the cleanups for blocks we are exiting. */
589 if (block->data.block.cleanups != 0)
591 expand_cleanups (block->data.block.cleanups, 1, 1);
592 do_pending_stack_adjust ();
596 if (stack_level)
598 /* Ensure stack adjust isn't done by emit_jump, as this
599 would clobber the stack pointer. This one should be
600 deleted as dead by flow. */
601 clear_pending_stack_adjust ();
602 do_pending_stack_adjust ();
604 /* Don't do this adjust if it's to the end label and this function
605 is to return with a depressed stack pointer. */
606 if (label == return_label
607 && (((TREE_CODE (TREE_TYPE (current_function_decl))
608 == FUNCTION_TYPE)
609 && (TYPE_RETURNS_STACK_DEPRESSED
610 (TREE_TYPE (current_function_decl))))))
612 else
613 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
616 if (body != 0 && DECL_TOO_LATE (body))
617 error ("jump to `%s' invalidly jumps into binding contour",
618 IDENTIFIER_POINTER (DECL_NAME (body)));
620 /* Label not yet defined: may need to put this goto
621 on the fixup list. */
622 else if (! expand_fixup (body, label, last_insn))
624 /* No fixup needed. Record that the label is the target
625 of at least one goto that has no fixup. */
626 if (body != 0)
627 TREE_ADDRESSABLE (body) = 1;
630 emit_jump (label);
633 /* Generate if necessary a fixup for a goto
634 whose target label in tree structure (if any) is TREE_LABEL
635 and whose target in rtl is RTL_LABEL.
637 If LAST_INSN is nonzero, we pretend that the jump appears
638 after insn LAST_INSN instead of at the current point in the insn stream.
640 The fixup will be used later to insert insns just before the goto.
641 Those insns will restore the stack level as appropriate for the
642 target label, and will (in the case of C++) also invoke any object
643 destructors which have to be invoked when we exit the scopes which
644 are exited by the goto.
646 Value is nonzero if a fixup is made. */
648 static int
649 expand_fixup (tree tree_label, rtx rtl_label, rtx last_insn)
651 struct nesting *block, *end_block;
653 /* See if we can recognize which block the label will be output in.
654 This is possible in some very common cases.
655 If we succeed, set END_BLOCK to that block.
656 Otherwise, set it to 0. */
658 if (cond_stack
659 && (rtl_label == cond_stack->data.cond.endif_label
660 || rtl_label == cond_stack->data.cond.next_label))
661 end_block = cond_stack;
662 else
663 end_block = 0;
665 /* Now set END_BLOCK to the binding level to which we will return. */
667 if (end_block)
669 struct nesting *next_block = end_block->all;
670 block = block_stack;
672 /* First see if the END_BLOCK is inside the innermost binding level.
673 If so, then no cleanups or stack levels are relevant. */
674 while (next_block && next_block != block)
675 next_block = next_block->all;
677 if (next_block)
678 return 0;
680 /* Otherwise, set END_BLOCK to the innermost binding level
681 which is outside the relevant control-structure nesting. */
682 next_block = block_stack->next;
683 for (block = block_stack; block != end_block; block = block->all)
684 if (block == next_block)
685 next_block = next_block->next;
686 end_block = next_block;
689 /* Does any containing block have a stack level or cleanups?
690 If not, no fixup is needed, and that is the normal case
691 (the only case, for standard C). */
692 for (block = block_stack; block != end_block; block = block->next)
693 if (block->data.block.stack_level != 0
694 || block->data.block.cleanups != 0)
695 break;
697 if (block != end_block)
699 /* Ok, a fixup is needed. Add a fixup to the list of such. */
700 struct goto_fixup *fixup = ggc_alloc (sizeof (struct goto_fixup));
701 /* In case an old stack level is restored, make sure that comes
702 after any pending stack adjust. */
703 /* ?? If the fixup isn't to come at the present position,
704 doing the stack adjust here isn't useful. Doing it with our
705 settings at that location isn't useful either. Let's hope
706 someone does it! */
707 if (last_insn == 0)
708 do_pending_stack_adjust ();
709 fixup->target = tree_label;
710 fixup->target_rtl = rtl_label;
712 /* Create a BLOCK node and a corresponding matched set of
713 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
714 this point. The notes will encapsulate any and all fixup
715 code which we might later insert at this point in the insn
716 stream. Also, the BLOCK node will be the parent (i.e. the
717 `SUPERBLOCK') of any other BLOCK nodes which we might create
718 later on when we are expanding the fixup code.
720 Note that optimization passes might move the *_BLOCK notes away,
721 so we use a NOTE_INSN_DELETED as a placeholder. */
724 rtx original_before_jump
725 = last_insn ? last_insn : get_last_insn ();
726 rtx start;
727 rtx end;
728 tree block;
730 block = make_node (BLOCK);
731 TREE_USED (block) = 1;
733 BLOCK_CHAIN (block)
734 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
735 BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
736 = block;
738 start_sequence ();
739 start = emit_note (NOTE_INSN_BLOCK_BEG);
740 NOTE_BLOCK (start) = block;
741 fixup->before_jump = emit_note (NOTE_INSN_DELETED);
742 end = emit_note (NOTE_INSN_BLOCK_END);
743 NOTE_BLOCK (end) = block;
744 fixup->context = block;
745 end_sequence ();
746 emit_insn_after (start, original_before_jump);
749 fixup->block_start_count = current_block_start_count;
750 fixup->stack_level = 0;
751 fixup->cleanup_list_list
752 = ((block->data.block.outer_cleanups
753 || block->data.block.cleanups)
754 ? tree_cons (NULL_TREE, block->data.block.cleanups,
755 block->data.block.outer_cleanups)
756 : 0);
757 fixup->next = goto_fixup_chain;
758 goto_fixup_chain = fixup;
761 return block != 0;
 764 /* Expand any needed fixups in the outermost binding level of the
765 function. FIRST_INSN is the first insn in the function. */
767 void
768 expand_fixups (rtx first_insn)
770 fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
773 /* When exiting a binding contour, process all pending gotos requiring fixups.
774 THISBLOCK is the structure that describes the block being exited.
775 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
776 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
777 FIRST_INSN is the insn that began this contour.
779 Gotos that jump out of this contour must restore the
780 stack level and do the cleanups before actually jumping.
782 DONT_JUMP_IN positive means report error if there is a jump into this
783 contour from before the beginning of the contour. This is also done if
784 STACK_LEVEL is nonzero unless DONT_JUMP_IN is negative. */
786 static void
787 fixup_gotos (struct nesting *thisblock, rtx stack_level,
788 tree cleanup_list, rtx first_insn, int dont_jump_in)
790 struct goto_fixup *f, *prev;
792 /* F is the fixup we are considering; PREV is the previous one. */
793 /* We run this loop in two passes so that cleanups of exited blocks
794 are run first, and blocks that are exited are marked so
795 afterwards. */
797 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
799 /* Test for a fixup that is inactive because it is already handled. */
800 if (f->before_jump == 0)
802 /* Delete inactive fixup from the chain, if that is easy to do. */
803 if (prev != 0)
804 prev->next = f->next;
806 /* Has this fixup's target label been defined?
807 If so, we can finalize it. */
808 else if (PREV_INSN (f->target_rtl) != 0)
810 rtx cleanup_insns;
812 /* If this fixup jumped into this contour from before the beginning
813 of this contour, report an error. This code used to use
814 the first non-label insn after f->target_rtl, but that's
815 wrong since such can be added, by things like put_var_into_stack
816 and have INSN_UIDs that are out of the range of the block. */
817 /* ??? Bug: this does not detect jumping in through intermediate
818 blocks that have stack levels or cleanups.
819 It detects only a problem with the innermost block
820 around the label. */
821 if (f->target != 0
822 && (dont_jump_in > 0 || (dont_jump_in == 0 && stack_level)
823 || cleanup_list)
824 && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
825 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
826 && ! DECL_ERROR_ISSUED (f->target))
828 error ("%Jlabel '%D' used before containing binding contour",
829 f->target, f->target);
830 /* Prevent multiple errors for one label. */
831 DECL_ERROR_ISSUED (f->target) = 1;
834 /* We will expand the cleanups into a sequence of their own and
835 then later on we will attach this new sequence to the insn
836 stream just ahead of the actual jump insn. */
838 start_sequence ();
840 /* Temporarily restore the lexical context where we will
841 logically be inserting the fixup code. We do this for the
842 sake of getting the debugging information right. */
844 lang_hooks.decls.pushlevel (0);
845 lang_hooks.decls.set_block (f->context);
847 /* Expand the cleanups for blocks this jump exits. */
848 if (f->cleanup_list_list)
850 tree lists;
851 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
852 /* Marked elements correspond to blocks that have been closed.
853 Do their cleanups. */
854 if (TREE_ADDRESSABLE (lists)
855 && TREE_VALUE (lists) != 0)
857 expand_cleanups (TREE_VALUE (lists), 1, 1);
858 /* Pop any pushes done in the cleanups,
859 in case function is about to return. */
860 do_pending_stack_adjust ();
864 /* Restore stack level for the biggest contour that this
865 jump jumps out of. */
866 if (f->stack_level
867 && ! (f->target_rtl == return_label
868 && ((TREE_CODE (TREE_TYPE (current_function_decl))
869 == FUNCTION_TYPE)
870 && (TYPE_RETURNS_STACK_DEPRESSED
871 (TREE_TYPE (current_function_decl))))))
872 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
874 /* Finish up the sequence containing the insns which implement the
875 necessary cleanups, and then attach that whole sequence to the
876 insn stream just ahead of the actual jump insn. Attaching it
 877 at that point ensures that any cleanups which are in fact
878 implicit C++ object destructions (which must be executed upon
879 leaving the block) appear (to the debugger) to be taking place
880 in an area of the generated code where the object(s) being
881 destructed are still "in scope". */
883 cleanup_insns = get_insns ();
884 lang_hooks.decls.poplevel (1, 0, 0);
886 end_sequence ();
887 emit_insn_after (cleanup_insns, f->before_jump);
889 f->before_jump = 0;
893 /* For any still-undefined labels, do the cleanups for this block now.
894 We must do this now since items in the cleanup list may go out
895 of scope when the block ends. */
896 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
897 if (f->before_jump != 0
898 && PREV_INSN (f->target_rtl) == 0
899 /* Label has still not appeared. If we are exiting a block with
900 a stack level to restore, that started before the fixup,
901 mark this stack level as needing restoration
902 when the fixup is later finalized. */
903 && thisblock != 0
904 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
905 means the label is undefined. That's erroneous, but possible. */
906 && (thisblock->data.block.block_start_count
907 <= f->block_start_count))
909 tree lists = f->cleanup_list_list;
910 rtx cleanup_insns;
912 for (; lists; lists = TREE_CHAIN (lists))
913 /* If the following elt. corresponds to our containing block
914 then the elt. must be for this block. */
915 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
917 start_sequence ();
918 lang_hooks.decls.pushlevel (0);
919 lang_hooks.decls.set_block (f->context);
920 expand_cleanups (TREE_VALUE (lists), 1, 1);
921 do_pending_stack_adjust ();
922 cleanup_insns = get_insns ();
923 lang_hooks.decls.poplevel (1, 0, 0);
924 end_sequence ();
925 if (cleanup_insns != 0)
926 f->before_jump
927 = emit_insn_after (cleanup_insns, f->before_jump);
929 f->cleanup_list_list = TREE_CHAIN (lists);
932 if (stack_level)
933 f->stack_level = stack_level;
937 /* Return the number of times character C occurs in string S. */
938 static int
939 n_occurrences (int c, const char *s)
941 int n = 0;
942 while (*s)
943 n += (*s++ == c);
944 return n;
947 /* Generate RTL for an asm statement (explicit assembler code).
948 STRING is a STRING_CST node containing the assembler code text,
949 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
950 insn is volatile; don't optimize it. */
952 void
953 expand_asm (tree string, int vol)
955 rtx body;
957 if (TREE_CODE (string) == ADDR_EXPR)
958 string = TREE_OPERAND (string, 0);
960 body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));
962 MEM_VOLATILE_P (body) = vol;
964 emit_insn (body);
967 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
968 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
969 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
970 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
971 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
972 constraint allows the use of a register operand. And, *IS_INOUT
973 will be true if the operand is read-write, i.e., if it is used as
974 an input as well as an output. If *CONSTRAINT_P is not in
975 canonical form, it will be made canonical. (Note that `+' will be
976 replaced with `=' as part of this process.)
978 Returns TRUE if all went well; FALSE if an error occurred. */
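/* For example, given the non-canonical output constraint "r+", this
   routine warns that the `+' is not at the beginning, rewrites the
   string to "=r", and sets *IS_INOUT and *ALLOWS_REG while leaving
   *ALLOWS_MEM false.  */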
980 bool
981 parse_output_constraint (const char **constraint_p, int operand_num,
982 int ninputs, int noutputs, bool *allows_mem,
983 bool *allows_reg, bool *is_inout)
985 const char *constraint = *constraint_p;
986 const char *p;
988 /* Assume the constraint doesn't allow the use of either a register
989 or memory. */
990 *allows_mem = false;
991 *allows_reg = false;
993 /* Allow the `=' or `+' to not be at the beginning of the string,
994 since it wasn't explicitly documented that way, and there is a
995 large body of code that puts it last. Swap the character to
996 the front, so as not to uglify any place else. */
997 p = strchr (constraint, '=');
998 if (!p)
999 p = strchr (constraint, '+');
1001 /* If the string doesn't contain an `=', issue an error
1002 message. */
1003 if (!p)
1005 error ("output operand constraint lacks `='");
1006 return false;
1009 /* If the constraint begins with `+', then the operand is both read
1010 from and written to. */
1011 *is_inout = (*p == '+');
1013 /* Canonicalize the output constraint so that it begins with `='. */
1014 if (p != constraint || is_inout)
1016 char *buf;
1017 size_t c_len = strlen (constraint);
1019 if (p != constraint)
1020 warning ("output constraint `%c' for operand %d is not at the beginning",
1021 *p, operand_num);
1023 /* Make a copy of the constraint. */
1024 buf = alloca (c_len + 1);
1025 strcpy (buf, constraint);
1026 /* Swap the first character and the `=' or `+'. */
1027 buf[p - constraint] = buf[0];
1028 /* Make sure the first character is an `='. (Until we do this,
1029 it might be a `+'.) */
1030 buf[0] = '=';
1031 /* Replace the constraint with the canonicalized string. */
1032 *constraint_p = ggc_alloc_string (buf, c_len);
1033 constraint = *constraint_p;
1036 /* Loop through the constraint string. */
1037 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
1038 switch (*p)
1040 case '+':
1041 case '=':
1042 error ("operand constraint contains incorrectly positioned '+' or '='");
1043 return false;
1045 case '%':
1046 if (operand_num + 1 == ninputs + noutputs)
1048 error ("`%%' constraint used with last operand");
1049 return false;
1051 break;
1053 case 'V': case 'm': case 'o':
1054 *allows_mem = true;
1055 break;
1057 case '?': case '!': case '*': case '&': case '#':
1058 case 'E': case 'F': case 'G': case 'H':
1059 case 's': case 'i': case 'n':
1060 case 'I': case 'J': case 'K': case 'L': case 'M':
1061 case 'N': case 'O': case 'P': case ',':
1062 break;
1064 case '0': case '1': case '2': case '3': case '4':
1065 case '5': case '6': case '7': case '8': case '9':
1066 case '[':
1067 error ("matching constraint not valid in output operand");
1068 return false;
1070 case '<': case '>':
1071 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1072 excepting those that expand_call created. So match memory
1073 and hope. */
1074 *allows_mem = true;
1075 break;
1077 case 'g': case 'X':
1078 *allows_reg = true;
1079 *allows_mem = true;
1080 break;
1082 case 'p': case 'r':
1083 *allows_reg = true;
1084 break;
1086 default:
1087 if (!ISALPHA (*p))
1088 break;
1089 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
1090 *allows_reg = true;
1091 #ifdef EXTRA_CONSTRAINT_STR
1092 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
1093 *allows_reg = true;
1094 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
1095 *allows_mem = true;
1096 else
1098 /* Otherwise we can't assume anything about the nature of
1099 the constraint except that it isn't purely registers.
1100 Treat it like "g" and hope for the best. */
1101 *allows_reg = true;
1102 *allows_mem = true;
1104 #endif
1105 break;
1108 return true;
1111 /* Similar, but for input constraints. */
1113 bool
1114 parse_input_constraint (const char **constraint_p, int input_num,
1115 int ninputs, int noutputs, int ninout,
1116 const char * const * constraints,
1117 bool *allows_mem, bool *allows_reg)
1119 const char *constraint = *constraint_p;
1120 const char *orig_constraint = constraint;
1121 size_t c_len = strlen (constraint);
1122 size_t j;
1123 bool saw_match = false;
1125 /* Assume the constraint doesn't allow the use of either
1126 a register or memory. */
1127 *allows_mem = false;
1128 *allows_reg = false;
1130 /* Make sure constraint has neither `=', `+', nor '&'. */
1132 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
1133 switch (constraint[j])
1135 case '+': case '=': case '&':
1136 if (constraint == orig_constraint)
1138 error ("input operand constraint contains `%c'", constraint[j]);
1139 return false;
1141 break;
1143 case '%':
1144 if (constraint == orig_constraint
1145 && input_num + 1 == ninputs - ninout)
1147 error ("`%%' constraint used with last operand");
1148 return false;
1150 break;
1152 case 'V': case 'm': case 'o':
1153 *allows_mem = true;
1154 break;
1156 case '<': case '>':
1157 case '?': case '!': case '*': case '#':
1158 case 'E': case 'F': case 'G': case 'H':
1159 case 's': case 'i': case 'n':
1160 case 'I': case 'J': case 'K': case 'L': case 'M':
1161 case 'N': case 'O': case 'P': case ',':
1162 break;
1164 /* Whether or not a numeric constraint allows a register is
1165 decided by the matching constraint, and so there is no need
1166 to do anything special with them. We must handle them in
1167 the default case, so that we don't unnecessarily force
1168 operands to memory. */
1169 case '0': case '1': case '2': case '3': case '4':
1170 case '5': case '6': case '7': case '8': case '9':
1172 char *end;
1173 unsigned long match;
1175 saw_match = true;
1177 match = strtoul (constraint + j, &end, 10);
1178 if (match >= (unsigned long) noutputs)
1180 error ("matching constraint references invalid operand number");
1181 return false;
1184 /* Try and find the real constraint for this dup. Only do this
1185 if the matching constraint is the only alternative. */
1186 if (*end == '\0'
1187 && (j == 0 || (j == 1 && constraint[0] == '%')))
1189 constraint = constraints[match];
1190 *constraint_p = constraint;
1191 c_len = strlen (constraint);
1192 j = 0;
1193 /* ??? At the end of the loop, we will skip the first part of
1194 the matched constraint. This assumes not only that the
1195 other constraint is an output constraint, but also that
1196 the '=' or '+' come first. */
1197 break;
1199 else
1200 j = end - constraint;
1201 /* Anticipate increment at end of loop. */
1202 j--;
1204 /* Fall through. */
1206 case 'p': case 'r':
1207 *allows_reg = true;
1208 break;
1210 case 'g': case 'X':
1211 *allows_reg = true;
1212 *allows_mem = true;
1213 break;
1215 default:
1216 if (! ISALPHA (constraint[j]))
1218 error ("invalid punctuation `%c' in constraint", constraint[j]);
1219 return false;
1221 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
1222 != NO_REGS)
1223 *allows_reg = true;
1224 #ifdef EXTRA_CONSTRAINT_STR
1225 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
1226 *allows_reg = true;
1227 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
1228 *allows_mem = true;
1229 else
1231 /* Otherwise we can't assume anything about the nature of
1232 the constraint except that it isn't purely registers.
1233 Treat it like "g" and hope for the best. */
1234 *allows_reg = true;
1235 *allows_mem = true;
1237 #endif
1238 break;
1241 if (saw_match && !*allows_reg)
1242 warning ("matching constraint does not allow a register");
1244 return true;
1247 /* INPUT is one of the input operands from EXPR, an ASM_EXPR. Returns true
1248 if it is an operand which must be passed in memory (i.e. an "m"
1249 constraint), false otherwise. */
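/* For example, for  asm ("..." : : "m" (v));  this returns true for the
   operand V, which must be passed in memory, whereas an input
   constrained with "r" or "g" yields false.  */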
1251 bool
1252 asm_op_is_mem_input (tree input, tree expr)
1254 const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (input)));
1255 tree outputs = ASM_OUTPUTS (expr);
1256 int noutputs = list_length (outputs);
1257 const char **constraints
1258 = (const char **) alloca ((noutputs) * sizeof (const char *));
1259 int i = 0;
1260 bool allows_mem, allows_reg;
1261 tree t;
1263 /* Collect output constraints. */
1264 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
1265 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1267 /* We pass 0 for input_num, ninputs and ninout; they are only used for
1268 error checking which will be done at expand time. */
1269 parse_input_constraint (&constraint, 0, 0, noutputs, 0, constraints,
1270 &allows_mem, &allows_reg);
1271 return (!allows_reg && allows_mem);
1274 /* Check for overlap between registers marked in CLOBBERED_REGS and
1275 anything inappropriate in DECL. Emit error and return TRUE for error,
1276 FALSE for ok. */
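/* Illustrative example, not from the original sources (the register
   name is arbitrary): given

     register int r asm ("REGNAME");

   an asm statement whose clobber list also names "REGNAME" overlaps the
   variable's hard register, so this function reports the error below
   and returns true.  */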
1278 static bool
1279 decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
1281 /* Conflicts between asm-declared register variables and the clobber
1282 list are not allowed. */
1283 if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
1284 && DECL_REGISTER (decl)
1285 && REG_P (DECL_RTL (decl))
1286 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
1288 rtx reg = DECL_RTL (decl);
1289 unsigned int regno;
1291 for (regno = REGNO (reg);
1292 regno < (REGNO (reg)
1293 + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
1294 regno++)
1295 if (TEST_HARD_REG_BIT (clobbered_regs, regno))
1297 error ("asm-specifier for variable `%s' conflicts with asm clobber list",
1298 IDENTIFIER_POINTER (DECL_NAME (decl)));
1300 /* Reset registerness to stop multiple errors emitted for a
1301 single variable. */
1302 DECL_REGISTER (decl) = 0;
1303 return true;
1306 return false;
1309 /* Generate RTL for an asm statement with arguments.
1310 STRING is the instruction template.
1311 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
 1312 Each output or input has an expression in the TREE_VALUE and
 1313 a tree list in TREE_PURPOSE which in turn contains a constraint
1314 name in TREE_VALUE (or NULL_TREE) and a constraint string
1315 in TREE_PURPOSE.
1316 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1317 that is clobbered by this insn.
1319 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1320 Some elements of OUTPUTS may be replaced with trees representing temporary
1321 values. The caller should copy those temporary values to the originally
1322 specified lvalues.
1324 VOL nonzero means the insn is volatile; don't optimize it. */
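/* Illustrative example, not from the original sources: a GNU C statement

     asm volatile ("..." : "=r" (x), "+m" (y) : "r" (z) : "memory");

   arrives here with two entries in OUTPUTS, one in INPUTS, the
   STRING_CST "memory" in CLOBBERS, and VOL nonzero because of the
   `volatile' keyword.  The "+m" operand is read-write, so below it is
   also counted as an extra matching input.  */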
1326 void
1327 expand_asm_operands (tree string, tree outputs, tree inputs,
1328 tree clobbers, int vol, location_t locus)
1330 rtvec argvec, constraintvec;
1331 rtx body;
1332 int ninputs = list_length (inputs);
1333 int noutputs = list_length (outputs);
1334 int ninout;
1335 int nclobbers;
1336 HARD_REG_SET clobbered_regs;
1337 int clobber_conflict_found = 0;
1338 tree tail;
1339 tree t;
1340 int i;
1341 /* Vector of RTX's of evaluated output operands. */
1342 rtx *output_rtx = alloca (noutputs * sizeof (rtx));
1343 int *inout_opnum = alloca (noutputs * sizeof (int));
1344 rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
1345 enum machine_mode *inout_mode
1346 = alloca (noutputs * sizeof (enum machine_mode));
1347 const char **constraints
1348 = alloca ((noutputs + ninputs) * sizeof (const char *));
1349 int old_generating_concat_p = generating_concat_p;
1351 /* An ASM with no outputs needs to be treated as volatile, for now. */
1352 if (noutputs == 0)
1353 vol = 1;
1355 if (! check_operand_nalternatives (outputs, inputs))
1356 return;
1358 string = resolve_asm_operand_names (string, outputs, inputs);
1360 /* Collect constraints. */
1361 i = 0;
1362 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
1363 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1364 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
1365 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1367 /* Sometimes we wish to automatically clobber registers across an asm.
1368 Case in point is when the i386 backend moved from cc0 to a hard reg --
1369 maintaining source-level compatibility means automatically clobbering
1370 the flags register. */
1371 clobbers = targetm.md_asm_clobbers (clobbers);
1373 /* Count the number of meaningful clobbered registers, ignoring what
1374 we would ignore later. */
1375 nclobbers = 0;
1376 CLEAR_HARD_REG_SET (clobbered_regs);
1377 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1379 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1381 i = decode_reg_name (regname);
1382 if (i >= 0 || i == -4)
1383 ++nclobbers;
1384 else if (i == -2)
1385 error ("unknown register name `%s' in `asm'", regname);
1387 /* Mark clobbered registers. */
1388 if (i >= 0)
1390 /* Clobbering the PIC register is an error */
1391 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
1393 error ("PIC register `%s' clobbered in `asm'", regname);
1394 return;
1397 SET_HARD_REG_BIT (clobbered_regs, i);
1401 /* First pass over inputs and outputs checks validity and sets
1402 mark_addressable if needed. */
1404 ninout = 0;
1405 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1407 tree val = TREE_VALUE (tail);
1408 tree type = TREE_TYPE (val);
1409 const char *constraint;
1410 bool is_inout;
1411 bool allows_reg;
1412 bool allows_mem;
1414 /* If there's an erroneous arg, emit no insn. */
1415 if (type == error_mark_node)
1416 return;
1418 /* Try to parse the output constraint. If that fails, there's
1419 no point in going further. */
1420 constraint = constraints[i];
1421 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1422 &allows_mem, &allows_reg, &is_inout))
1423 return;
1425 if (! allows_reg
1426 && (allows_mem
1427 || is_inout
1428 || (DECL_P (val)
1429 && REG_P (DECL_RTL (val))
1430 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
1431 lang_hooks.mark_addressable (val);
1433 if (is_inout)
1434 ninout++;
1437 ninputs += ninout;
1438 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1440 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1441 return;
1444 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1446 bool allows_reg, allows_mem;
1447 const char *constraint;
1449 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1450 would get VOIDmode and that could cause a crash in reload. */
1451 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1452 return;
1454 constraint = constraints[i + noutputs];
1455 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1456 constraints, &allows_mem, &allows_reg))
1457 return;
1459 if (! allows_reg && allows_mem)
1460 lang_hooks.mark_addressable (TREE_VALUE (tail));
1463 /* Second pass evaluates arguments. */
1465 ninout = 0;
1466 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1468 tree val = TREE_VALUE (tail);
1469 tree type = TREE_TYPE (val);
1470 bool is_inout;
1471 bool allows_reg;
1472 bool allows_mem;
1473 rtx op;
1475 if (!parse_output_constraint (&constraints[i], i, ninputs,
1476 noutputs, &allows_mem, &allows_reg,
1477 &is_inout))
1478 abort ();
1480 /* If an output operand is not a decl or indirect ref and our constraint
1481 allows a register, make a temporary to act as an intermediate.
1482 Make the asm insn write into that, then our caller will copy it to
1483 the real output operand. Likewise for promoted variables. */
1485 generating_concat_p = 0;
1487 real_output_rtx[i] = NULL_RTX;
1488 if ((TREE_CODE (val) == INDIRECT_REF
1489 && allows_mem)
1490 || (DECL_P (val)
1491 && (allows_mem || REG_P (DECL_RTL (val)))
1492 && ! (REG_P (DECL_RTL (val))
1493 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1494 || ! allows_reg
1495 || is_inout)
1497 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1498 if (MEM_P (op))
1499 op = validize_mem (op);
1501 if (! allows_reg && !MEM_P (op))
1502 error ("output number %d not directly addressable", i);
1503 if ((! allows_mem && MEM_P (op))
1504 || GET_CODE (op) == CONCAT)
1506 real_output_rtx[i] = protect_from_queue (op, 1);
1507 op = gen_reg_rtx (GET_MODE (op));
1508 if (is_inout)
1509 emit_move_insn (op, real_output_rtx[i]);
1512 else
1514 op = assign_temp (type, 0, 0, 1);
1515 op = validize_mem (op);
1516 TREE_VALUE (tail) = make_tree (type, op);
1518 output_rtx[i] = op;
1520 generating_concat_p = old_generating_concat_p;
1522 if (is_inout)
1524 inout_mode[ninout] = TYPE_MODE (type);
1525 inout_opnum[ninout++] = i;
1528 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1529 clobber_conflict_found = 1;
1532 /* Make vectors for the expression-rtx, constraint strings,
1533 and named operands. */
1535 argvec = rtvec_alloc (ninputs);
1536 constraintvec = rtvec_alloc (ninputs);
1538 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1539 : GET_MODE (output_rtx[0])),
1540 TREE_STRING_POINTER (string),
1541 empty_string, 0, argvec, constraintvec,
1542 locus);
1544 MEM_VOLATILE_P (body) = vol;
1546 /* Eval the inputs and put them into ARGVEC.
1547 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1549 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1551 bool allows_reg, allows_mem;
1552 const char *constraint;
1553 tree val, type;
1554 rtx op;
1556 constraint = constraints[i + noutputs];
1557 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1558 constraints, &allows_mem, &allows_reg))
1559 abort ();
1561 generating_concat_p = 0;
1563 val = TREE_VALUE (tail);
1564 type = TREE_TYPE (val);
1565 op = expand_expr (val, NULL_RTX, VOIDmode,
1566 (allows_mem && !allows_reg
1567 ? EXPAND_MEMORY : EXPAND_NORMAL));
1569 /* Never pass a CONCAT to an ASM. */
1570 if (GET_CODE (op) == CONCAT)
1571 op = force_reg (GET_MODE (op), op);
1572 else if (MEM_P (op))
1573 op = validize_mem (op);
1575 if (asm_operand_ok (op, constraint) <= 0)
1577 if (allows_reg)
1578 op = force_reg (TYPE_MODE (type), op);
1579 else if (!allows_mem)
1580 warning ("asm operand %d probably doesn't match constraints",
1581 i + noutputs);
1582 else if (MEM_P (op))
1584 /* We won't recognize either volatile memory or memory
 1585 with a queued address as an available memory_operand
1586 at this point. Ignore it: clearly this *is* a memory. */
1588 else
1590 warning ("use of memory input without lvalue in "
1591 "asm operand %d is deprecated", i + noutputs);
1593 if (CONSTANT_P (op))
1595 rtx mem = force_const_mem (TYPE_MODE (type), op);
1596 if (mem)
1597 op = validize_mem (mem);
1598 else
1599 op = force_reg (TYPE_MODE (type), op);
1601 if (REG_P (op)
1602 || GET_CODE (op) == SUBREG
1603 || GET_CODE (op) == CONCAT)
1605 tree qual_type = build_qualified_type (type,
1606 (TYPE_QUALS (type)
1607 | TYPE_QUAL_CONST));
1608 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1609 memloc = validize_mem (memloc);
1610 emit_move_insn (memloc, op);
1611 op = memloc;
1616 generating_concat_p = old_generating_concat_p;
1617 ASM_OPERANDS_INPUT (body, i) = op;
1619 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1620 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1622 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1623 clobber_conflict_found = 1;
1626 /* Protect all the operands from the queue now that they have all been
1627 evaluated. */
1629 generating_concat_p = 0;
1631 for (i = 0; i < ninputs - ninout; i++)
1632 ASM_OPERANDS_INPUT (body, i)
1633 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1635 for (i = 0; i < noutputs; i++)
1636 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1638 /* For in-out operands, copy output rtx to input rtx. */
1639 for (i = 0; i < ninout; i++)
1641 int j = inout_opnum[i];
1642 char buffer[16];
1644 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1645 = output_rtx[j];
1647 sprintf (buffer, "%d", j);
1648 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1649 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
1652 generating_concat_p = old_generating_concat_p;
1654 /* Now, for each output, construct an rtx
1655 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1656 ARGVEC CONSTRAINTS OPNAMES))
1657 If there is more than one, put them inside a PARALLEL. */
1659 if (noutputs == 1 && nclobbers == 0)
1661 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1662 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1665 else if (noutputs == 0 && nclobbers == 0)
1667 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1668 emit_insn (body);
1671 else
1673 rtx obody = body;
1674 int num = noutputs;
1676 if (num == 0)
1677 num = 1;
1679 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1681 /* For each output operand, store a SET. */
1682 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1684 XVECEXP (body, 0, i)
1685 = gen_rtx_SET (VOIDmode,
1686 output_rtx[i],
1687 gen_rtx_ASM_OPERANDS
1688 (GET_MODE (output_rtx[i]),
1689 TREE_STRING_POINTER (string),
1690 constraints[i], i, argvec, constraintvec,
1691 locus));
1693 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1696 /* If there are no outputs (but there are some clobbers)
1697 store the bare ASM_OPERANDS into the PARALLEL. */
1699 if (i == 0)
1700 XVECEXP (body, 0, i++) = obody;
1702 /* Store (clobber REG) for each clobbered register specified. */
1704 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1706 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1707 int j = decode_reg_name (regname);
1708 rtx clobbered_reg;
1710 if (j < 0)
1712 if (j == -3) /* `cc', which is not a register */
1713 continue;
1715 if (j == -4) /* `memory', don't cache memory across asm */
1717 XVECEXP (body, 0, i++)
1718 = gen_rtx_CLOBBER (VOIDmode,
1719 gen_rtx_MEM
1720 (BLKmode,
1721 gen_rtx_SCRATCH (VOIDmode)));
1722 continue;
1725 /* Ignore unknown register, error already signaled. */
1726 continue;
1729 /* Use QImode since that's guaranteed to clobber just one reg. */
1730 clobbered_reg = gen_rtx_REG (QImode, j);
 1732 /* Do a sanity check for any overlap between clobbers and the
 1733 inputs and outputs that has not already been handled. Such overlap
1734 should have been detected and reported above. */
1735 if (!clobber_conflict_found)
1737 int opno;
1739 /* We test the old body (obody) contents to avoid tripping
1740 over the under-construction body. */
1741 for (opno = 0; opno < noutputs; opno++)
1742 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1743 internal_error ("asm clobber conflict with output operand");
1745 for (opno = 0; opno < ninputs - ninout; opno++)
1746 if (reg_overlap_mentioned_p (clobbered_reg,
1747 ASM_OPERANDS_INPUT (obody, opno)))
1748 internal_error ("asm clobber conflict with input operand");
1751 XVECEXP (body, 0, i++)
1752 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1755 emit_insn (body);
1758 /* For any outputs that needed reloading into registers, spill them
1759 back to where they belong. */
1760 for (i = 0; i < noutputs; ++i)
1761 if (real_output_rtx[i])
1762 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1764 free_temp_slots ();
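/* A hedged note on the in-out handling above: an output constraint such as
   "+r" is treated as an output plus a matching input; the loop near the top
   of this block copies each such output rtx into the trailing input slots
   of the ASM_OPERANDS and rewrites the matching input constraint to the
   decimal number of its output operand (e.g. "0").  */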
1767 void
1768 expand_asm_expr (tree exp)
1770 int noutputs, i;
1771 tree outputs, tail;
1772 tree *o;
1774 if (ASM_INPUT_P (exp))
1776 expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
1777 return;
1780 outputs = ASM_OUTPUTS (exp);
1781 noutputs = list_length (outputs);
1782 /* o[I] is the place where output number I should be written. */
1783 o = (tree *) alloca (noutputs * sizeof (tree));
1785 /* Record the contents of OUTPUTS before it is modified. */
1786 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1787 o[i] = TREE_VALUE (tail);
1789 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1790 OUTPUTS some trees for where the values were actually stored. */
1791 expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1792 ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1793 input_location);
1795 /* Copy all the intermediate outputs into the specified outputs. */
1796 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1798 if (o[i] != TREE_VALUE (tail))
1800 expand_assignment (o[i], TREE_VALUE (tail), 0);
1801 free_temp_slots ();
1803 /* Restore the original value so that it's correct the next
1804 time we expand this function. */
1805 TREE_VALUE (tail) = o[i];
1809 /* Those MODIFY_EXPRs could do autoincrements. */
1810 emit_queue ();
1813 /* A subroutine of expand_asm_operands. Check that all operands have
1814 the same number of alternatives. Return true if so. */
1816 static bool
1817 check_operand_nalternatives (tree outputs, tree inputs)
1819 if (outputs || inputs)
1821 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1822 int nalternatives
1823 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1824 tree next = inputs;
1826 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1828 error ("too many alternatives in `asm'");
1829 return false;
1832 tmp = outputs;
1833 while (tmp)
1835 const char *constraint
1836 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1838 if (n_occurrences (',', constraint) != nalternatives)
1840 error ("operand constraints for `asm' differ in number of alternatives");
1841 return false;
1844 if (TREE_CHAIN (tmp))
1845 tmp = TREE_CHAIN (tmp);
1846 else
1847 tmp = next, next = 0;
1851 return true;
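/* Example (illustrative only): if the first output constraint is "=r,m",
   every operand must name two alternatives, so an input constraint of
   "ri,g" passes this check, while a plain "ri" would be rejected as
   differing in number of alternatives.  */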
1854 /* A subroutine of expand_asm_operands. Check that all operand names
1855 are unique. Return true if so. The names are stored as STRING_CSTs
1856 (not canonicalized identifiers), so we compare them with
1857 simple_cst_equal rather than by pointer. */
1859 static bool
1860 check_unique_operand_names (tree outputs, tree inputs)
1862 tree i, j;
1864 for (i = outputs; i ; i = TREE_CHAIN (i))
1866 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1867 if (! i_name)
1868 continue;
1870 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1871 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1872 goto failure;
1875 for (i = inputs; i ; i = TREE_CHAIN (i))
1877 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1878 if (! i_name)
1879 continue;
1881 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1882 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1883 goto failure;
1884 for (j = outputs; j ; j = TREE_CHAIN (j))
1885 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1886 goto failure;
1889 return true;
1891 failure:
1892 error ("duplicate asm operand name '%s'",
1893 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1894 return false;
1897 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1898 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1899 STRING and in the constraints to those numbers. */
1901 tree
1902 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1904 char *buffer;
1905 char *p;
1906 const char *c;
1907 tree t;
1909 check_unique_operand_names (outputs, inputs);
1911 /* Substitute [<name>] in input constraint strings. There should be no
1912 named operands in output constraints. */
1913 for (t = inputs; t ; t = TREE_CHAIN (t))
1915 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1916 if (strchr (c, '[') != NULL)
1918 p = buffer = xstrdup (c);
1919 while ((p = strchr (p, '[')) != NULL)
1920 p = resolve_operand_name_1 (p, outputs, inputs);
1921 TREE_VALUE (TREE_PURPOSE (t))
1922 = build_string (strlen (buffer), buffer);
1923 free (buffer);
1927 /* Now check for any needed substitutions in the template. */
1928 c = TREE_STRING_POINTER (string);
1929 while ((c = strchr (c, '%')) != NULL)
1931 if (c[1] == '[')
1932 break;
1933 else if (ISALPHA (c[1]) && c[2] == '[')
1934 break;
1935 else
1937 c += 1;
1938 continue;
1942 if (c)
1944 /* OK, we need to make a copy so we can perform the substitutions.
1945 Assume that we will not need extra space--we get to remove '['
1946 and ']', which means we cannot have a problem until we have more
1947 than 999 operands. */
1948 buffer = xstrdup (TREE_STRING_POINTER (string));
1949 p = buffer + (c - TREE_STRING_POINTER (string));
1951 while ((p = strchr (p, '%')) != NULL)
1953 if (p[1] == '[')
1954 p += 1;
1955 else if (ISALPHA (p[1]) && p[2] == '[')
1956 p += 2;
1957 else
1959 p += 1;
1960 continue;
1963 p = resolve_operand_name_1 (p, outputs, inputs);
1966 string = build_string (strlen (buffer), buffer);
1967 free (buffer);
1970 return string;
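/* For example (a hypothetical statement, shown only to illustrate the
   substitution), given

       asm ("add %[sum], %[a], %[b]"
            : [sum] "=r" (s)
            : [a] "r" (x), [b] "r" (y));

   the template is rewritten to "add %0, %1, %2", and any "[name]"
   reference appearing in an input constraint is likewise replaced by the
   number of the operand it names.  */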
1973 /* A subroutine of resolve_operand_names. P points to the '[' for a
1974 potential named operand of the form [<name>]. In place, replace
1975 the name and brackets with a number. Return a pointer to the
1976 balance of the string after substitution. */
1978 static char *
1979 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
1981 char *q;
1982 int op;
1983 tree t;
1984 size_t len;
1986 /* Collect the operand name. */
1987 q = strchr (p, ']');
1988 if (!q)
1990 error ("missing close brace for named operand");
1991 return strchr (p, '\0');
1993 len = q - p - 1;
1995 /* Resolve the name to a number. */
1996 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1998 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1999 if (name)
2001 const char *c = TREE_STRING_POINTER (name);
2002 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2003 goto found;
2006 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2008 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2009 if (name)
2011 const char *c = TREE_STRING_POINTER (name);
2012 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2013 goto found;
2017 *q = '\0';
2018 error ("undefined named operand '%s'", p + 1);
2019 op = 0;
2020 found:
2022 /* Replace the name with the number. Unfortunately, not all libraries
2023 get the return value of sprintf correct, so search for the end of the
2024 generated string by hand. */
2025 sprintf (p, "%d", op);
2026 p = strchr (p, '\0');
2028 /* Verify the no extra buffer space assumption. */
2029 if (p > q)
2030 abort ();
2032 /* Shift the rest of the buffer down to fill the gap. */
2033 memmove (p, q + 1, strlen (q + 1) + 1);
2035 return p;
2038 /* Generate RTL to evaluate the expression EXP. */
2040 void
2041 expand_expr_stmt (tree exp)
2043 rtx value;
2044 tree type;
2046 value = expand_expr (exp, const0_rtx, VOIDmode, 0);
2047 type = TREE_TYPE (exp);
2049 /* If all we do is reference a volatile value in memory,
2050 copy it to a register to be sure it is actually touched. */
2051 if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
2053 if (TYPE_MODE (type) == VOIDmode)
2055 else if (TYPE_MODE (type) != BLKmode)
2056 value = copy_to_reg (value);
2057 else
2059 rtx lab = gen_label_rtx ();
2061 /* Compare the value with itself to reference it. */
2062 emit_cmp_and_jump_insns (value, value, EQ,
2063 expand_expr (TYPE_SIZE (type),
2064 NULL_RTX, VOIDmode, 0),
2065 BLKmode, 0, lab);
2066 emit_label (lab);
2070 /* Free any temporaries used to evaluate this expression. */
2071 free_temp_slots ();
2073 emit_queue ();
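/* For instance (a hedged example, not taken from any front end), a bare
   statement "*p;" where P has type "volatile int *" arrives here as an
   unused MEM; copying it into a register (or, for a BLKmode value,
   emitting the dummy self-comparison above) guarantees that the volatile
   location is really read.  */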
2076 /* Warn if EXP contains any computations whose results are not used.
2077 Return 1 if a warning is printed; 0 otherwise. LOCUS is the
2078 (potential) location of the expression. */
2080 int
2081 warn_if_unused_value (tree exp, location_t locus)
2083 restart:
2084 if (TREE_USED (exp))
2085 return 0;
2087 /* Don't warn about void constructs. This includes casting to void,
2088 void function calls, and statement expressions with a final cast
2089 to void. */
2090 if (VOID_TYPE_P (TREE_TYPE (exp)))
2091 return 0;
2093 if (EXPR_LOCUS (exp))
2094 locus = *EXPR_LOCUS (exp);
2096 switch (TREE_CODE (exp))
2098 case PREINCREMENT_EXPR:
2099 case POSTINCREMENT_EXPR:
2100 case PREDECREMENT_EXPR:
2101 case POSTDECREMENT_EXPR:
2102 case MODIFY_EXPR:
2103 case INIT_EXPR:
2104 case TARGET_EXPR:
2105 case CALL_EXPR:
2106 case TRY_CATCH_EXPR:
2107 case WITH_CLEANUP_EXPR:
2108 case EXIT_EXPR:
2109 return 0;
2111 case BIND_EXPR:
2112 /* For a binding, warn if no side effect within it. */
2113 exp = BIND_EXPR_BODY (exp);
2114 goto restart;
2116 case SAVE_EXPR:
2117 exp = TREE_OPERAND (exp, 0);
2118 goto restart;
2120 case TRUTH_ORIF_EXPR:
2121 case TRUTH_ANDIF_EXPR:
2122 /* In && or ||, warn if 2nd operand has no side effect. */
2123 exp = TREE_OPERAND (exp, 1);
2124 goto restart;
2126 case COMPOUND_EXPR:
2127 if (TREE_NO_WARNING (exp))
2128 return 0;
2129 if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
2130 return 1;
2131 /* Let people do `(foo (), 0)' without a warning. */
2132 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2133 return 0;
2134 exp = TREE_OPERAND (exp, 1);
2135 goto restart;
2137 case NOP_EXPR:
2138 case CONVERT_EXPR:
2139 case NON_LVALUE_EXPR:
2140 /* Don't warn about conversions not explicit in the user's program. */
2141 if (TREE_NO_WARNING (exp))
2142 return 0;
2143 /* Assignment to a cast usually results in a cast of a modify.
2144 Don't complain about that. There can be an arbitrary number of
2145 casts before the modify, so we must loop until we find the first
2146 non-cast expression and then test to see if that is a modify. */
2148 tree tem = TREE_OPERAND (exp, 0);
2150 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2151 tem = TREE_OPERAND (tem, 0);
2153 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2154 || TREE_CODE (tem) == CALL_EXPR)
2155 return 0;
2157 goto maybe_warn;
2159 case INDIRECT_REF:
2160 /* Don't warn about automatic dereferencing of references, since
2161 the user cannot control it. */
2162 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2164 exp = TREE_OPERAND (exp, 0);
2165 goto restart;
2167 /* Fall through. */
2169 default:
2170 /* Referencing a volatile value is a side effect, so don't warn. */
2171 if ((DECL_P (exp)
2172 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2173 && TREE_THIS_VOLATILE (exp))
2174 return 0;
2176 /* If this is an expression which has no operands, there is no value
2177 to be unused. There are no such language-independent codes,
2178 but front ends may define such. */
2179 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2180 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2181 return 0;
2183 maybe_warn:
2184 /* If this is an expression with side effects, don't warn. */
2185 if (TREE_SIDE_EFFECTS (exp))
2186 return 0;
2188 warning ("%Hvalue computed is not used", &locus);
2189 return 1;
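/* Some hedged examples of the intent of the logic above: a statement
   "x + 1;" warns, "(void) foo ();" does not (void type), "(foo (), 0)"
   does not (constant second operand of the COMPOUND_EXPR), and "*p;" for
   a volatile-qualified *p does not, since the access itself is a side
   effect.  */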
2193 /* Generate RTL for the start of an if-then. COND is the expression
2194 whose truth should be tested.
2196 If EXITFLAG is nonzero, this conditional is visible to
2197 `exit_something'. */
2199 void
2200 expand_start_cond (tree cond, int exitflag)
2202 struct nesting *thiscond = ALLOC_NESTING ();
2204 /* Make an entry on cond_stack for the cond we are entering. */
2206 thiscond->desc = COND_NESTING;
2207 thiscond->next = cond_stack;
2208 thiscond->all = nesting_stack;
2209 thiscond->depth = ++nesting_depth;
2210 thiscond->data.cond.next_label = gen_label_rtx ();
2211 /* Before we encounter an `else', we don't need a separate exit label
2212 unless there are supposed to be exit statements
2213 to exit this conditional. */
2214 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2215 thiscond->data.cond.endif_label = thiscond->exit_label;
2216 cond_stack = thiscond;
2217 nesting_stack = thiscond;
2219 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2222 /* Generate RTL between the then-clause and the elseif-clause
2223 of an if-then-elseif-.... */
2225 void
2226 expand_start_elseif (tree cond)
2228 if (cond_stack->data.cond.endif_label == 0)
2229 cond_stack->data.cond.endif_label = gen_label_rtx ();
2230 emit_jump (cond_stack->data.cond.endif_label);
2231 emit_label (cond_stack->data.cond.next_label);
2232 cond_stack->data.cond.next_label = gen_label_rtx ();
2233 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2236 /* Generate RTL between the then-clause and the else-clause
2237 of an if-then-else. */
2239 void
2240 expand_start_else (void)
2242 if (cond_stack->data.cond.endif_label == 0)
2243 cond_stack->data.cond.endif_label = gen_label_rtx ();
2245 emit_jump (cond_stack->data.cond.endif_label);
2246 emit_label (cond_stack->data.cond.next_label);
2247 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2250 /* After calling expand_start_else, turn this "else" into an "else if"
2251 by providing another condition. */
2253 void
2254 expand_elseif (tree cond)
2256 cond_stack->data.cond.next_label = gen_label_rtx ();
2257 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2260 /* Generate RTL for the end of an if-then.
2261 Pop the record for it off of cond_stack. */
2263 void
2264 expand_end_cond (void)
2266 struct nesting *thiscond = cond_stack;
2268 do_pending_stack_adjust ();
2269 if (thiscond->data.cond.next_label)
2270 emit_label (thiscond->data.cond.next_label);
2271 if (thiscond->data.cond.endif_label)
2272 emit_label (thiscond->data.cond.endif_label);
2274 POPSTACK (cond_stack);
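/* A sketch of the call sequence a front end would use for
   "if (a) ... else if (b) ... else ..." (illustrative only):

       expand_start_cond (a, 0);
       ... expand the first then-clause ...
       expand_start_elseif (b);
       ... expand the second then-clause ...
       expand_start_else ();
       ... expand the else-clause ...
       expand_end_cond ();  */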
2277 /* Return nonzero if we should preserve sub-expressions as separate
2278 pseudos. We never do so if we aren't optimizing. We always do so
2279 if -fexpensive-optimizations. */
2281 int
2282 preserve_subexpressions_p (void)
2284 if (flag_expensive_optimizations)
2285 return 1;
2287 if (optimize == 0 || cfun == 0 || cfun->stmt == 0)
2288 return 0;
2290 return 1;
2294 /* Generate RTL to return from the current function, with no value.
2295 (That is, we do not do anything about returning any value.) */
2297 void
2298 expand_null_return (void)
2300 rtx last_insn;
2302 last_insn = get_last_insn ();
2304 /* If this function was declared to return a value, but we
2305 didn't, clobber the return registers so that they are not
2306 propagated live to the rest of the function. */
2307 clobber_return_register ();
2309 expand_null_return_1 (last_insn);
2312 /* Generate RTL to return directly from the current function.
2313 (That is, we bypass any return value.) */
2315 void
2316 expand_naked_return (void)
2318 rtx last_insn, end_label;
2320 last_insn = get_last_insn ();
2321 end_label = naked_return_label;
2323 clear_pending_stack_adjust ();
2324 do_pending_stack_adjust ();
2326 if (end_label == 0)
2327 end_label = naked_return_label = gen_label_rtx ();
2328 expand_goto_internal (NULL_TREE, end_label, last_insn);
2331 /* Try to guess whether the returned value indicates an error code. */
2332 static enum br_predictor
2333 return_prediction (rtx val)
2335 /* Different heuristics for pointers and scalars. */
2336 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2338 /* NULL is usually not returned. */
2339 if (val == const0_rtx)
2340 return PRED_NULL_RETURN;
2342 else
2344 /* Negative return values are often used to indicate
2345 errors. */
2346 if (GET_CODE (val) == CONST_INT
2347 && INTVAL (val) < 0)
2348 return PRED_NEGATIVE_RETURN;
2349 /* Constant return values are also usually errors;
2350 zero and one often mean booleans, so exclude them from the
2351 heuristic. */
2352 if (CONSTANT_P (val)
2353 && (val != const0_rtx && val != const1_rtx))
2354 return PRED_CONST_RETURN;
2356 return PRED_NO_PREDICTION;
2360 /* If the current function returns values in the most significant part
2361 of a register, shift return value VAL appropriately. The mode of
2362 the function's return type is known not to be BLKmode. */
2364 static rtx
2365 shift_return_value (rtx val)
2367 tree type;
2369 type = TREE_TYPE (DECL_RESULT (current_function_decl));
2370 if (targetm.calls.return_in_msb (type))
2372 rtx target;
2373 HOST_WIDE_INT shift;
2375 target = DECL_RTL (DECL_RESULT (current_function_decl));
2376 shift = (GET_MODE_BITSIZE (GET_MODE (target))
2377 - BITS_PER_UNIT * int_size_in_bytes (type));
2378 if (shift > 0)
2379 val = expand_shift (LSHIFT_EXPR, GET_MODE (target),
2380 gen_lowpart (GET_MODE (target), val),
2381 build_int_2 (shift, 0), target, 1);
2383 return val;
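/* Worked example (purely illustrative): if the hard return register is 64
   bits wide and the returned type occupies 4 bytes, SHIFT is
   64 - 8 * 4 = 32, so the value is shifted left by 32 bits, placing it in
   the most significant half of the register before the copy.  */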
2387 /* Generate RTL to return from the current function, with value VAL. */
2389 static void
2390 expand_value_return (rtx val)
2392 rtx last_insn;
2393 rtx return_reg;
2394 enum br_predictor pred;
2396 if (flag_guess_branch_prob
2397 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
2399 /* Emit information for branch prediction. */
2400 rtx note;
2402 note = emit_note (NOTE_INSN_PREDICTION);
2404 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2408 last_insn = get_last_insn ();
2409 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2411 /* Copy the value to the return location
2412 unless it's already there. */
2414 if (return_reg != val)
2416 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2417 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
2419 int unsignedp = TYPE_UNSIGNED (type);
2420 enum machine_mode old_mode
2421 = DECL_MODE (DECL_RESULT (current_function_decl));
2422 enum machine_mode mode
2423 = promote_mode (type, old_mode, &unsignedp, 1);
2425 if (mode != old_mode)
2426 val = convert_modes (mode, old_mode, val, unsignedp);
2428 if (GET_CODE (return_reg) == PARALLEL)
2429 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
2430 else
2431 emit_move_insn (return_reg, val);
2434 expand_null_return_1 (last_insn);
2437 /* Output a return with no value. If LAST_INSN is nonzero,
2438 pretend that the return takes place after LAST_INSN. */
2440 static void
2441 expand_null_return_1 (rtx last_insn)
2443 rtx end_label = return_label;
2445 clear_pending_stack_adjust ();
2446 do_pending_stack_adjust ();
2448 if (end_label == 0)
2449 end_label = return_label = gen_label_rtx ();
2450 expand_goto_internal (NULL_TREE, end_label, last_insn);
2453 /* Generate RTL to evaluate the expression RETVAL and return it
2454 from the current function. */
2456 void
2457 expand_return (tree retval)
2459 /* If there are any cleanups to be performed, then they will
2460 be inserted following LAST_INSN. It is desirable
2461 that the last_insn, for such purposes, should be the
2462 last insn before computing the return value. Otherwise, cleanups
2463 which call functions can clobber the return value. */
2464 /* ??? rms: I think that is erroneous, because in C++ it would
2465 run destructors on variables that might be used in the subsequent
2466 computation of the return value. */
2467 rtx last_insn = 0;
2468 rtx result_rtl;
2469 rtx val = 0;
2470 tree retval_rhs;
2472 /* If function wants no value, give it none. */
2473 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2475 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2476 emit_queue ();
2477 expand_null_return ();
2478 return;
2481 if (retval == error_mark_node)
2483 /* Treat this like a return of no value from a function that
2484 returns a value. */
2485 expand_null_return ();
2486 return;
2488 else if (TREE_CODE (retval) == RESULT_DECL)
2489 retval_rhs = retval;
2490 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2491 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2492 retval_rhs = TREE_OPERAND (retval, 1);
2493 else
2494 retval_rhs = retval;
2496 last_insn = get_last_insn ();
2498 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
2500 /* If the result is an aggregate that is being returned in one (or more)
2501 registers, load the registers here. The compiler currently can't handle
2502 copying a BLKmode value into registers. We could put this code in a
2503 more general area (for use by everyone instead of just function
2504 call/return), but until this feature is generally usable it is kept here
2505 (and in expand_call). The value must go into a pseudo in case there
2506 are cleanups that will clobber the real return register. */
2508 if (retval_rhs != 0
2509 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2510 && REG_P (result_rtl))
2512 int i;
2513 unsigned HOST_WIDE_INT bitpos, xbitpos;
2514 unsigned HOST_WIDE_INT padding_correction = 0;
2515 unsigned HOST_WIDE_INT bytes
2516 = int_size_in_bytes (TREE_TYPE (retval_rhs));
2517 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2518 unsigned int bitsize
2519 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
2520 rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
2521 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2522 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2523 enum machine_mode tmpmode, result_reg_mode;
2525 if (bytes == 0)
2527 expand_null_return ();
2528 return;
2531 /* If the structure doesn't take up a whole number of words, see
2532 whether the register value should be padded on the left or on
2533 the right. Set PADDING_CORRECTION to the number of padding
2534 bits needed on the left side.
2536 In most ABIs, the structure will be returned at the least significant end of
2537 the register, which translates to right padding on little-endian
2538 targets and left padding on big-endian targets. The opposite
2539 holds if the structure is returned at the most significant
2540 end of the register. */
2541 if (bytes % UNITS_PER_WORD != 0
2542 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
2543 ? !BYTES_BIG_ENDIAN
2544 : BYTES_BIG_ENDIAN))
2545 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2546 * BITS_PER_UNIT));
2548 /* Copy the structure BITSIZE bits at a time. */
2549 for (bitpos = 0, xbitpos = padding_correction;
2550 bitpos < bytes * BITS_PER_UNIT;
2551 bitpos += bitsize, xbitpos += bitsize)
2553 /* We need a new destination pseudo each time xbitpos is
2554 on a word boundary and when xbitpos == padding_correction
2555 (the first time through). */
2556 if (xbitpos % BITS_PER_WORD == 0
2557 || xbitpos == padding_correction)
2559 /* Generate an appropriate register. */
2560 dst = gen_reg_rtx (word_mode);
2561 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2563 /* Clear the destination before we move anything into it. */
2564 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
2567 /* We need a new source operand each time bitpos is on a word
2568 boundary. */
2569 if (bitpos % BITS_PER_WORD == 0)
2570 src = operand_subword_force (result_val,
2571 bitpos / BITS_PER_WORD,
2572 BLKmode);
2574 /* Use bitpos for the source extraction (left justified) and
2575 xbitpos for the destination store (right justified). */
2576 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2577 extract_bit_field (src, bitsize,
2578 bitpos % BITS_PER_WORD, 1,
2579 NULL_RTX, word_mode, word_mode,
2580 BITS_PER_WORD),
2581 BITS_PER_WORD);
2584 tmpmode = GET_MODE (result_rtl);
2585 if (tmpmode == BLKmode)
2587 /* Find the smallest integer mode large enough to hold the
2588 entire structure and use that mode instead of BLKmode
2589 on the USE insn for the return register. */
2590 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2591 tmpmode != VOIDmode;
2592 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2593 /* Have we found a large enough mode? */
2594 if (GET_MODE_SIZE (tmpmode) >= bytes)
2595 break;
2597 /* No suitable mode found. */
2598 if (tmpmode == VOIDmode)
2599 abort ();
2601 PUT_MODE (result_rtl, tmpmode);
2604 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2605 result_reg_mode = word_mode;
2606 else
2607 result_reg_mode = tmpmode;
2608 result_reg = gen_reg_rtx (result_reg_mode);
2610 emit_queue ();
2611 for (i = 0; i < n_regs; i++)
2612 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2613 result_pseudos[i]);
2615 if (tmpmode != result_reg_mode)
2616 result_reg = gen_lowpart (tmpmode, result_reg);
2618 expand_value_return (result_reg);
2620 else if (retval_rhs != 0
2621 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
2622 && (REG_P (result_rtl)
2623 || (GET_CODE (result_rtl) == PARALLEL)))
2625 /* Calculate the return value into a temporary (usually a pseudo
2626 reg). */
2627 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
2628 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
2630 val = assign_temp (nt, 0, 0, 1);
2631 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2632 val = force_not_mem (val);
2633 emit_queue ();
2634 /* Return the calculated value, doing cleanups first. */
2635 expand_value_return (shift_return_value (val));
2637 else
2639 /* No cleanups or no hard reg used;
2640 calculate value into hard return reg. */
2641 expand_expr (retval, const0_rtx, VOIDmode, 0);
2642 emit_queue ();
2643 expand_value_return (result_rtl);
2647 /* Generate the RTL code for entering a binding contour.
2648 The variables are declared one by one, by calls to `expand_decl'.
2650 FLAGS is a bitwise or of the following flags:
2652 1 - Nonzero if this construct should be visible to
2653 `exit_something'.
2655 2 - Nonzero if this contour does not require a
2656 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
2657 language-independent code should set this flag because they
2658 will not create corresponding BLOCK nodes. (There should be
2659 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
2660 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
2661 when expand_end_bindings is called.
2663 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
2664 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
2665 note. */
2667 void
2668 expand_start_bindings_and_block (int flags, tree block)
2670 struct nesting *thisblock = ALLOC_NESTING ();
2671 rtx note;
2672 int exit_flag = ((flags & 1) != 0);
2673 int block_flag = ((flags & 2) == 0);
2675 /* If a BLOCK is supplied, then the caller should be requesting a
2676 NOTE_INSN_BLOCK_BEG note. */
2677 if (!block_flag && block)
2678 abort ();
2680 /* Create a note to mark the beginning of the block. */
2681 note = emit_note (NOTE_INSN_DELETED);
2683 /* Make an entry on block_stack for the block we are entering. */
2685 thisblock->desc = BLOCK_NESTING;
2686 thisblock->next = block_stack;
2687 thisblock->all = nesting_stack;
2688 thisblock->depth = ++nesting_depth;
2689 thisblock->data.block.stack_level = 0;
2690 thisblock->data.block.cleanups = 0;
2691 thisblock->data.block.exception_region = 0;
2692 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
2694 thisblock->data.block.conditional_code = 0;
2695 thisblock->data.block.last_unconditional_cleanup = note;
2696 /* When we insert instructions after the last unconditional cleanup,
2697 we don't adjust last_insn. That means that a later add_insn will
2698 clobber the instructions we've just added. The easiest way to
2699 fix this is to just insert another instruction here, so that the
2700 instructions inserted after the last unconditional cleanup are
2701 never the last instruction. */
2702 emit_note (NOTE_INSN_DELETED);
2704 if (block_stack
2705 && !(block_stack->data.block.cleanups == NULL_TREE
2706 && block_stack->data.block.outer_cleanups == NULL_TREE))
2707 thisblock->data.block.outer_cleanups
2708 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2709 block_stack->data.block.outer_cleanups);
2710 else
2711 thisblock->data.block.outer_cleanups = 0;
2712 thisblock->data.block.label_chain = 0;
2713 thisblock->data.block.innermost_stack_block = stack_block_stack;
2714 thisblock->data.block.first_insn = note;
2715 thisblock->data.block.block_start_count = ++current_block_start_count;
2716 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2717 block_stack = thisblock;
2718 nesting_stack = thisblock;
2720 /* Make a new level for allocating stack slots. */
2721 push_temp_slots ();
2724 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2725 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
2726 expand_expr are made. After we end the region, we know that all
2727 space for all temporaries that were created by TARGET_EXPRs will be
2728 destroyed and their space freed for reuse. */
2730 void
2731 expand_start_target_temps (void)
2733 /* This is so that even if the result is preserved, the space
2734 allocated will be freed, as we know that it is no longer in use. */
2735 push_temp_slots ();
2737 /* Start a new binding layer that will keep track of all cleanup
2738 actions to be performed. */
2739 expand_start_bindings (2);
2741 target_temp_slot_level = temp_slot_level;
2744 void
2745 expand_end_target_temps (void)
2747 expand_end_bindings (NULL_TREE, 0, 0);
2749 /* This is so that even if the result is preserved, the space
2750 allocated will be freed, as we know that it is no longer in use. */
2751 pop_temp_slots ();
2754 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
2755 in question represents the outermost pair of curly braces (i.e. the "body
2756 block") of a function or method.
2758 For any BLOCK node representing a "body block" of a function or method, the
2759 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
2760 represents the outermost (function) scope for the function or method (i.e.
2761 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
2762 *that* node in turn will point to the relevant FUNCTION_DECL node. */
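/* Sketch of the chain described above (illustrative):

       BLOCK (body)  --BLOCK_SUPERCONTEXT-->  BLOCK (outermost scope)
                     --BLOCK_SUPERCONTEXT-->  FUNCTION_DECL  */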
2764 int
2765 is_body_block (tree stmt)
2767 if (lang_hooks.no_body_blocks)
2768 return 0;
2770 if (TREE_CODE (stmt) == BLOCK)
2772 tree parent = BLOCK_SUPERCONTEXT (stmt);
2774 if (parent && TREE_CODE (parent) == BLOCK)
2776 tree grandparent = BLOCK_SUPERCONTEXT (parent);
2778 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
2779 return 1;
2783 return 0;
2786 /* True if we are currently emitting insns in an area of output code
2787 that is controlled by a conditional expression. This is used by
2788 the cleanup handling code to generate conditional cleanup actions. */
2790 int
2791 conditional_context (void)
2793 return block_stack && block_stack->data.block.conditional_code;
2796 /* Return an opaque pointer to the current nesting level, so frontend code
2797 can check its own sanity. */
2799 struct nesting *
2800 current_nesting_level (void)
2802 return cfun ? block_stack : 0;
2805 /* Emit code to restore vital registers at the beginning of a nonlocal goto
2806 handler. */
2807 static void
2808 expand_nl_goto_receiver (void)
2810 /* The frame pointer is clobbered when we get here, so we have to make
2811 sure it's marked as used by this function. */
2812 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
2814 /* Mark the static chain as clobbered here so life information
2815 doesn't get messed up for it. */
2816 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
2818 #ifdef HAVE_nonlocal_goto
2819 if (! HAVE_nonlocal_goto)
2820 #endif
2821 /* First adjust our frame pointer to its actual value. It was
2822 previously set to the start of the virtual area corresponding to
2823 the stacked variables when we branched here and now needs to be
2824 adjusted to the actual hardware fp value.
2826 Assignments to virtual registers are converted by
2827 instantiate_virtual_regs into the corresponding assignment
2828 to the underlying register (fp in this case) that makes
2829 the original assignment true.
2830 So the following insn will actually be
2831 decrementing fp by STARTING_FRAME_OFFSET. */
2832 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
2834 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2835 if (fixed_regs[ARG_POINTER_REGNUM])
2837 #ifdef ELIMINABLE_REGS
2838 /* If the argument pointer can be eliminated in favor of the
2839 frame pointer, we don't need to restore it. We assume here
2840 that if such an elimination is present, it can always be used.
2841 This is the case on all known machines; if we don't make this
2842 assumption, we do unnecessary saving on many machines. */
2843 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
2844 size_t i;
2846 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
2847 if (elim_regs[i].from == ARG_POINTER_REGNUM
2848 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
2849 break;
2851 if (i == ARRAY_SIZE (elim_regs))
2852 #endif
2854 /* Now restore our arg pointer from the address at which it
2855 was saved in our stack frame. */
2856 emit_move_insn (virtual_incoming_args_rtx,
2857 copy_to_reg (get_arg_pointer_save_area (cfun)));
2860 #endif
2862 #ifdef HAVE_nonlocal_goto_receiver
2863 if (HAVE_nonlocal_goto_receiver)
2864 emit_insn (gen_nonlocal_goto_receiver ());
2865 #endif
2867 /* @@@ This is a kludge. Not all machine descriptions define a blockage
2868 insn, but we must not allow the code we just generated to be reordered
2869 by scheduling. Specifically, the update of the frame pointer must
2870 happen immediately, not later. So emit an ASM_INPUT to act as a blockage
2871 insn. */
2872 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
2875 /* Warn about any unused VARS (which may contain nodes other than
2876 VAR_DECLs, but such nodes are ignored). The nodes are connected
2877 via the TREE_CHAIN field. */
2879 void
2880 warn_about_unused_variables (tree vars)
2882 tree decl;
2884 if (warn_unused_variable)
2885 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2886 if (TREE_CODE (decl) == VAR_DECL
2887 && ! TREE_USED (decl)
2888 && ! DECL_IN_SYSTEM_HEADER (decl)
2889 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
2890 warning ("%Junused variable '%D'", decl, decl);
2893 /* Generate RTL code to terminate a binding contour.
2895 VARS is the chain of VAR_DECL nodes for the variables bound in this
2896 contour. There may actually be other nodes in this chain, but any
2897 nodes other than VAR_DECLS are ignored.
2899 MARK_ENDS is nonzero if we should put a note at the beginning
2900 and end of this binding contour.
2902 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
2903 zero if we can jump into this contour only if it does not have a saved
2904 stack level, and negative if we are not to check for invalid use of
2905 labels (because the front end does that). */
2907 void
2908 expand_end_bindings (tree vars, int mark_ends ATTRIBUTE_UNUSED,
2909 int dont_jump_in)
2911 struct nesting *thisblock = block_stack;
2913 /* If any of the variables in this scope were not used, warn the
2914 user. */
2915 warn_about_unused_variables (vars);
2917 if (thisblock->exit_label)
2919 do_pending_stack_adjust ();
2920 emit_label (thisblock->exit_label);
2923 /* Don't allow jumping into a block that has a stack level.
2924 Cleanups are allowed, though. */
2925 if (dont_jump_in > 0
2926 || (dont_jump_in == 0 && thisblock->data.block.stack_level != 0))
2928 struct label_chain *chain;
2930 /* Any labels in this block are no longer valid to go to.
2931 Mark them to cause an error message. */
2932 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2934 DECL_TOO_LATE (chain->label) = 1;
2935 /* If any goto without a fixup came to this label,
2936 that must be an error, because gotos without fixups
2937 come from outside all saved stack-levels. */
2938 if (TREE_ADDRESSABLE (chain->label))
2939 error ("%Jlabel '%D' used before containing binding contour",
2940 chain->label, chain->label);
2944 /* Restore stack level in effect before the block
2945 (only if variable-size objects allocated). */
2946 /* Perform any cleanups associated with the block. */
2948 if (thisblock->data.block.stack_level != 0
2949 || thisblock->data.block.cleanups != 0)
2951 int reachable;
2952 rtx insn;
2954 /* Only clean up here if this point can actually be reached. */
2955 insn = get_last_insn ();
2956 if (GET_CODE (insn) == NOTE)
2957 insn = prev_nonnote_insn (insn);
2958 reachable = (! insn || GET_CODE (insn) != BARRIER);
2960 /* Do the cleanups. */
2961 expand_cleanups (thisblock->data.block.cleanups, 0, reachable);
2962 if (reachable)
2963 do_pending_stack_adjust ();
2965 /* Restore the stack level. */
2967 if (reachable && thisblock->data.block.stack_level != 0)
2969 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2970 thisblock->data.block.stack_level, NULL_RTX);
2971 if (cfun->nonlocal_goto_save_area)
2972 update_nonlocal_goto_save_area ();
2975 /* Any gotos out of this block must also do these things.
2976 Also report any gotos with fixups that came to labels in this
2977 level. */
2978 fixup_gotos (thisblock,
2979 thisblock->data.block.stack_level,
2980 thisblock->data.block.cleanups,
2981 thisblock->data.block.first_insn,
2982 dont_jump_in);
2985 /* Mark the beginning and end of the scope if requested.
2986 We do this now, after running cleanups on the variables
2987 just going out of scope, so they are in scope for their cleanups. */
2989 /* Get rid of the beginning-mark if we don't make an end-mark. */
2990 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2992 /* Restore the temporary level of TARGET_EXPRs. */
2993 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
2995 /* Restore block_stack level for containing block. */
2997 stack_block_stack = thisblock->data.block.innermost_stack_block;
2998 POPSTACK (block_stack);
3000 /* Pop the stack slot nesting and free any slots at this level. */
3001 pop_temp_slots ();
3004 /* Generate code to save the stack pointer at the start of the current block
3005 and set up to restore it on exit. */
3007 void
3008 save_stack_pointer (void)
3010 struct nesting *thisblock = block_stack;
3012 if (thisblock->data.block.stack_level == 0)
3014 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3015 &thisblock->data.block.stack_level,
3016 thisblock->data.block.first_insn);
3017 stack_block_stack = thisblock;
3021 /* Generate RTL for the automatic variable declaration DECL.
3022 (Other kinds of declarations are simply ignored if seen here.) */
3024 void
3025 expand_decl (tree decl)
3027 tree type;
3029 type = TREE_TYPE (decl);
3031 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3032 type in case this node is used in a reference. */
3033 if (TREE_CODE (decl) == CONST_DECL)
3035 DECL_MODE (decl) = TYPE_MODE (type);
3036 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3037 DECL_SIZE (decl) = TYPE_SIZE (type);
3038 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3039 return;
3042 /* Otherwise, only automatic variables need any expansion done. Static and
3043 external variables, and external functions, will be handled by
3044 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3045 nothing. PARM_DECLs are handled in `assign_parms'. */
3046 if (TREE_CODE (decl) != VAR_DECL)
3047 return;
3049 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3050 return;
3052 /* Create the RTL representation for the variable. */
3054 if (type == error_mark_node)
3055 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3057 else if (DECL_SIZE (decl) == 0)
3058 /* Variable with incomplete type. */
3060 rtx x;
3061 if (DECL_INITIAL (decl) == 0)
3062 /* Error message was already done; now avoid a crash. */
3063 x = gen_rtx_MEM (BLKmode, const0_rtx);
3064 else
3065 /* An initializer is going to decide the size of this array.
3066 Until we know the size, represent its address with a reg. */
3067 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3069 set_mem_attributes (x, decl, 1);
3070 SET_DECL_RTL (decl, x);
3072 else if (use_register_for_decl (decl))
3074 /* Automatic variable that can go in a register. */
3075 int unsignedp = TYPE_UNSIGNED (type);
3076 enum machine_mode reg_mode
3077 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3079 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3081 /* Note if the object is a user variable. */
3082 if (!DECL_ARTIFICIAL (decl))
3084 mark_user_reg (DECL_RTL (decl));
3086 /* Trust user variables which have a pointer type to really
3087 be pointers. Do not trust compiler generated temporaries
3088 as our type system is totally busted as it relates to
3089 pointer arithmetic which translates into lots of compiler
3090 generated objects with pointer types, but which are not really
3091 pointers. */
3092 if (POINTER_TYPE_P (type))
3093 mark_reg_pointer (DECL_RTL (decl),
3094 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3097 maybe_set_unchanging (DECL_RTL (decl), decl);
3100 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3101 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3102 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3103 STACK_CHECK_MAX_VAR_SIZE)))
3105 /* Variable of fixed size that goes on the stack. */
3106 rtx oldaddr = 0;
3107 rtx addr;
3108 rtx x;
3110 /* If we previously made RTL for this decl, it must be an array
3111 whose size was determined by the initializer.
3112 The old address was a register; set that register now
3113 to the proper address. */
3114 if (DECL_RTL_SET_P (decl))
3116 if (!MEM_P (DECL_RTL (decl))
3117 || !REG_P (XEXP (DECL_RTL (decl), 0)))
3118 abort ();
3119 oldaddr = XEXP (DECL_RTL (decl), 0);
3122 /* Set alignment we actually gave this decl. */
3123 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3124 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3125 DECL_USER_ALIGN (decl) = 0;
3127 x = assign_temp (decl, 1, 1, 1);
3128 set_mem_attributes (x, decl, 1);
3129 SET_DECL_RTL (decl, x);
3131 if (oldaddr)
3133 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3134 if (addr != oldaddr)
3135 emit_move_insn (oldaddr, addr);
3138 else
3139 /* Dynamic-size object: must push space on the stack. */
3141 rtx address, size, x;
3143 /* Record the stack pointer on entry to this block, if we have
3144 not already done so. */
3145 do_pending_stack_adjust ();
3146 save_stack_pointer ();
3148 /* Compute the variable's size, in bytes. This will expand any
3149 needed SAVE_EXPRs for the first time. */
3150 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3151 free_temp_slots ();
3153 /* Allocate space on the stack for the variable. Note that
3154 DECL_ALIGN says how the variable is to be aligned and we
3155 cannot use it to conclude anything about the alignment of
3156 the size. */
3157 address = allocate_dynamic_stack_space (size, NULL_RTX,
3158 TYPE_ALIGN (TREE_TYPE (decl)));
3160 /* Reference the variable indirectly through that rtx. */
3161 x = gen_rtx_MEM (DECL_MODE (decl), address);
3162 set_mem_attributes (x, decl, 1);
3163 SET_DECL_RTL (decl, x);
3166 /* Indicate the alignment we actually gave this variable. */
3167 #ifdef STACK_BOUNDARY
3168 DECL_ALIGN (decl) = STACK_BOUNDARY;
3169 #else
3170 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3171 #endif
3172 DECL_USER_ALIGN (decl) = 0;
3176 /* Emit code to allocate T_SIZE bytes of dynamic stack space for ALLOC. */
3177 void
3178 expand_stack_alloc (tree alloc, tree t_size)
3180 rtx address, dest, size;
3181 tree var, type;
3183 if (TREE_CODE (alloc) != ADDR_EXPR)
3184 abort ();
3185 var = TREE_OPERAND (alloc, 0);
3186 if (TREE_CODE (var) != VAR_DECL)
3187 abort ();
3189 type = TREE_TYPE (var);
3191 /* Compute the variable's size, in bytes. */
3192 size = expand_expr (t_size, NULL_RTX, VOIDmode, 0);
3193 free_temp_slots ();
3195 /* Allocate space on the stack for the variable. */
3196 address = XEXP (DECL_RTL (var), 0);
3197 dest = allocate_dynamic_stack_space (size, address, TYPE_ALIGN (type));
3198 if (dest != address)
3199 emit_move_insn (address, dest);
3201 /* Indicate the alignment we actually gave this variable. */
3202 #ifdef STACK_BOUNDARY
3203 DECL_ALIGN (var) = STACK_BOUNDARY;
3204 #else
3205 DECL_ALIGN (var) = BIGGEST_ALIGNMENT;
3206 #endif
3207 DECL_USER_ALIGN (var) = 0;
3210 /* Emit code to save the current value of the stack pointer. */
3211 rtx
3212 expand_stack_save (void)
3214 rtx ret = NULL_RTX;
3216 do_pending_stack_adjust ();
3217 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
3218 return ret;
3221 /* Emit code to restore the stack pointer from the value saved in VAR. */
3222 void
3223 expand_stack_restore (tree var)
3225 rtx sa = DECL_RTL (var);
3227 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
3230 /* Emit code to perform the initialization of a declaration DECL. */
3232 void
3233 expand_decl_init (tree decl)
3235 int was_used = TREE_USED (decl);
3237 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
3238 for static decls. */
3239 if (TREE_CODE (decl) == CONST_DECL
3240 || TREE_STATIC (decl))
3241 return;
3243 /* Compute and store the initial value now. */
3245 push_temp_slots ();
3247 if (DECL_INITIAL (decl) == error_mark_node)
3249 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3251 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3252 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3253 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3255 emit_queue ();
3257 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3259 emit_line_note (DECL_SOURCE_LOCATION (decl));
3260 expand_assignment (decl, DECL_INITIAL (decl), 0);
3261 emit_queue ();
3264 /* Don't let the initialization count as "using" the variable. */
3265 TREE_USED (decl) = was_used;
3267 /* Free any temporaries we made while initializing the decl. */
3268 preserve_temp_slots (NULL_RTX);
3269 free_temp_slots ();
3270 pop_temp_slots ();
3273 /* CLEANUP is an expression to be executed at exit from this binding contour;
3274 for example, in C++, it might call the destructor for this variable.
3276 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3277 CLEANUP multiple times, and have the correct semantics. This
3278 happens in exception handling, for gotos, returns, breaks that
3279 leave the current scope.
3281 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3282 that is not associated with any particular variable. */
3284 int
3285 expand_decl_cleanup (tree decl, tree cleanup)
3287 struct nesting *thisblock;
3289 /* Error if we are not in any block. */
3290 if (cfun == 0 || block_stack == 0)
3291 return 0;
3293 thisblock = block_stack;
3295 /* Record the cleanup if there is one. */
3297 if (cleanup != 0)
3299 tree t;
3300 rtx seq;
3301 tree *cleanups = &thisblock->data.block.cleanups;
3302 int cond_context = conditional_context ();
3304 if (cond_context)
3306 rtx flag = gen_reg_rtx (word_mode);
3307 rtx set_flag_0;
3308 tree cond;
3310 start_sequence ();
3311 emit_move_insn (flag, const0_rtx);
3312 set_flag_0 = get_insns ();
3313 end_sequence ();
3315 thisblock->data.block.last_unconditional_cleanup
3316 = emit_insn_after (set_flag_0,
3317 thisblock->data.block.last_unconditional_cleanup);
3319 emit_move_insn (flag, const1_rtx);
3321 cond = build_decl (VAR_DECL, NULL_TREE,
3322 lang_hooks.types.type_for_mode (word_mode, 1));
3323 SET_DECL_RTL (cond, flag);
3325 /* Conditionalize the cleanup. */
3326 cleanup = build (COND_EXPR, void_type_node,
3327 lang_hooks.truthvalue_conversion (cond),
3328 cleanup, integer_zero_node);
3329 cleanup = fold (cleanup);
3331 cleanups = &thisblock->data.block.cleanups;
3334 cleanup = unsave_expr (cleanup);
3336 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
3338 if (! cond_context)
3339 /* If this block has a cleanup, it belongs in stack_block_stack. */
3340 stack_block_stack = thisblock;
3342 if (cond_context)
3344 start_sequence ();
3347 if (! using_eh_for_cleanups_p)
3348 TREE_ADDRESSABLE (t) = 1;
3349 else
3350 expand_eh_region_start ();
3352 if (cond_context)
3354 seq = get_insns ();
3355 end_sequence ();
3356 if (seq)
3357 thisblock->data.block.last_unconditional_cleanup
3358 = emit_insn_after (seq,
3359 thisblock->data.block.last_unconditional_cleanup);
3361 else
3363 thisblock->data.block.last_unconditional_cleanup
3364 = get_last_insn ();
3365 /* When we insert instructions after the last unconditional cleanup,
3366 we don't adjust last_insn. That means that a later add_insn will
3367 clobber the instructions we've just added. The easiest way to
3368 fix this is to just insert another instruction here, so that the
3369 instructions inserted after the last unconditional cleanup are
3370 never the last instruction. */
3371 emit_note (NOTE_INSN_DELETED);
3374 return 1;
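/* For example (hedged; the exact trees a front end builds vary), a C++
   front end expanding "{ T x; ... }" might call expand_decl for X and then
   expand_decl_cleanup (x, <expression calling T's destructor on x>), so
   that the destructor runs on every exit from the binding contour, whether
   by falling off the end, by goto, by return or via exception handling.  */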
3377 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
3378 is thrown. */
3380 int
3381 expand_decl_cleanup_eh (tree decl, tree cleanup, int eh_only)
3383 int ret = expand_decl_cleanup (decl, cleanup);
3384 if (cleanup && ret)
3386 tree node = block_stack->data.block.cleanups;
3387 CLEANUP_EH_ONLY (node) = eh_only;
3389 return ret;
3392 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3393 DECL_ELTS is the list of elements that belong to DECL's type.
3394 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3396 void
3397 expand_anon_union_decl (tree decl, tree cleanup, tree decl_elts)
3399 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
3400 rtx x;
3401 tree t;
3403 /* If any of the elements are addressable, so is the entire union. */
3404 for (t = decl_elts; t; t = TREE_CHAIN (t))
3405 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
3407 TREE_ADDRESSABLE (decl) = 1;
3408 break;
3411 expand_decl (decl);
3412 expand_decl_cleanup (decl, cleanup);
3413 x = DECL_RTL (decl);
3415 /* Go through the elements, assigning RTL to each. */
3416 for (t = decl_elts; t; t = TREE_CHAIN (t))
3418 tree decl_elt = TREE_VALUE (t);
3419 tree cleanup_elt = TREE_PURPOSE (t);
3420 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3422 /* If any of the elements are used, so is the entire
3423 union. */
3424 if (TREE_USED (decl_elt))
3425 TREE_USED (decl) = 1;
3427 /* Propagate the union's alignment to the elements. */
3428 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3429 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
3431 /* If the element has BLKmode and the union doesn't, the union is
3432 aligned such that the element doesn't need to have BLKmode, so
3433 change the element's mode to the appropriate one for its size. */
3434 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3435 DECL_MODE (decl_elt) = mode
3436 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
3438 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3439 instead create a new MEM rtx with the proper mode. */
3440 if (MEM_P (x))
3442 if (mode == GET_MODE (x))
3443 SET_DECL_RTL (decl_elt, x);
3444 else
3445 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
3447 else if (REG_P (x))
3449 if (mode == GET_MODE (x))
3450 SET_DECL_RTL (decl_elt, x);
3451 else
3452 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
3454 else
3455 abort ();
3457 /* Record the cleanup if there is one. */
3459 if (cleanup != 0)
3460 thisblock->data.block.cleanups
3461 = tree_cons (decl_elt, cleanup_elt,
3462 thisblock->data.block.cleanups);
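/* Illustration (a sketch under assumed typical modes, not tied to any
   particular front end): for an anonymous union "union { int i; float f; };"
   declared inside a block, the union gets a single slot or register, and
   I and F each receive DECL_RTL referring to that same storage, viewed in
   SImode and SFmode respectively.  */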
3466 /* Expand a list of cleanups LIST.
3467 Elements may be expressions or may be nested lists.
3469 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
3470 goto and handle protection regions specially in that case.
3472 If REACHABLE, we emit code, otherwise just inform the exception handling
3473 code about this finalization. */
3475 static void
3476 expand_cleanups (tree list, int in_fixup, int reachable)
3478 tree tail;
3479 for (tail = list; tail; tail = TREE_CHAIN (tail))
3480 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3481 expand_cleanups (TREE_VALUE (tail), in_fixup, reachable);
3482 else
3484 if (! in_fixup && using_eh_for_cleanups_p)
3485 expand_eh_region_end_cleanup (TREE_VALUE (tail));
3487 if (reachable && !CLEANUP_EH_ONLY (tail))
3489 /* Cleanups may be run multiple times. For example,
3490 when exiting a binding contour, we expand the
3491 cleanups associated with that contour. When a goto
3492 within that binding contour has a target outside that
3493 contour, it will expand all cleanups from its scope to
3494 the target. Though the cleanups are expanded multiple
3495 times, the control paths are non-overlapping so the
3496 cleanups will not be executed twice. */
3498 /* We may need to protect from outer cleanups. */
3499 if (in_fixup && using_eh_for_cleanups_p)
3501 expand_eh_region_start ();
3503 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3505 expand_eh_region_end_fixup (TREE_VALUE (tail));
3507 else
3508 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3510 free_temp_slots ();
3515 /* Mark the context we are emitting RTL for as a conditional
3516 context, so that any cleanup actions we register with
3517 expand_decl_cleanup will be properly conditionalized when those
3518 cleanup actions are later performed. Must be called before any
3519 expression (tree) is expanded that is within a conditional context. */
3521 void
3522 start_cleanup_deferral (void)
3524 /* block_stack can be NULL if we are inside the parameter list. It is
3525 OK to do nothing, because cleanups aren't possible here. */
3526 if (block_stack)
3527 ++block_stack->data.block.conditional_code;
3530 /* Mark the end of a conditional region of code. Because cleanup
3531 deferrals may be nested, we may still be in a conditional region
3532 after we end the currently deferred cleanups; only after we end all
3533 deferred cleanups are we back in unconditional code. */
3535 void
3536 end_cleanup_deferral (void)
3538 /* block_stack can be NULL if we are inside the parameter list. It is
3539 OK to do nothing, because cleanups aren't possible here. */
3540 if (block_stack)
3541 --block_stack->data.block.conditional_code;
3544 tree
3545 last_cleanup_this_contour (void)
3547 if (block_stack == 0)
3548 return 0;
3550 return block_stack->data.block.cleanups;
3554 /* Return nonzero if any containing block has a stack level or
3555 cleanups. */
3557 int
3558 containing_blocks_have_cleanups_or_stack_level (void)
3560 struct nesting *block;
3562 for (block = block_stack; block; block = block->next)
3563 if (block->data.block.stack_level != 0
3564 || block->data.block.cleanups != 0)
3565 return 1;
3567 return 0;
3570 /* Return 1 if there are any pending cleanups at this point.
3571 Check the current contour as well as contours that enclose
3572 the current contour. */
3574 int
3575 any_pending_cleanups (void)
3577 struct nesting *block;
3579 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
3580 return 0;
3582 if (block_stack->data.block.cleanups != NULL)
3583 return 1;
3585 if (block_stack->data.block.outer_cleanups == 0)
3586 return 0;
3588 for (block = block_stack->next; block; block = block->next)
3589 if (block->data.block.cleanups != 0)
3590 return 1;
3592 return 0;
3595 /* Enter a case (Pascal) or switch (C) statement.
3596 Push a block onto case_stack and nesting_stack
3597 to accumulate the case-labels that are seen
3598 and to record the labels generated for the statement.
3600 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3601 Otherwise, this construct is transparent for `exit_something'.
3603 EXPR is the index-expression to be dispatched on.
3604 TYPE is its nominal type. We could simply convert EXPR to this type,
3605 but instead we take short cuts. */
3607 void
3608 expand_start_case (int exit_flag, tree expr, tree type,
3609 const char *printname)
3611 struct nesting *thiscase = ALLOC_NESTING ();
3613 /* Make an entry on case_stack for the case we are entering. */
3615 thiscase->desc = CASE_NESTING;
3616 thiscase->next = case_stack;
3617 thiscase->all = nesting_stack;
3618 thiscase->depth = ++nesting_depth;
3619 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3620 thiscase->data.case_stmt.case_list = 0;
3621 thiscase->data.case_stmt.index_expr = expr;
3622 thiscase->data.case_stmt.nominal_type = type;
3623 thiscase->data.case_stmt.default_label = 0;
3624 thiscase->data.case_stmt.printname = printname;
3625 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
3626 case_stack = thiscase;
3627 nesting_stack = thiscase;
3629 do_pending_stack_adjust ();
3630 emit_queue ();
3632 /* Make sure case_stmt.start points to something that won't
3633 need any transformation before expand_end_case. */
3634 if (GET_CODE (get_last_insn ()) != NOTE)
3635 emit_note (NOTE_INSN_DELETED);
3637 thiscase->data.case_stmt.start = get_last_insn ();
3639 start_cleanup_deferral ();
3642 /* Accumulate one case or default label inside a case or switch statement.
3643 VALUE is the value of the case (a null pointer, for a default label).
3644 The function CONVERTER, when applied to arguments T and V,
3645 converts the value V to the type T.
3647 If not currently inside a case or switch statement, return 1 and do
3648 nothing. The caller will print a language-specific error message.
3649 If VALUE is a duplicate or overlaps, return 2 and do nothing
3650 except store the (first) duplicate node in *DUPLICATE.
3651 If VALUE is out of range, return 3 and do nothing.
3652 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3653 Return 0 on success.
3655 Extended to handle range statements. */
3658 pushcase (tree value, tree (*converter) (tree, tree), tree label,
3659 tree *duplicate)
3661 tree index_type;
3662 tree nominal_type;
3664 /* Fail if not inside a real case statement. */
3665 if (! (case_stack && case_stack->data.case_stmt.start))
3666 return 1;
3668 if (stack_block_stack
3669 && stack_block_stack->depth > case_stack->depth)
3670 return 5;
3672 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3673 nominal_type = case_stack->data.case_stmt.nominal_type;
3675 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3676 if (index_type == error_mark_node)
3677 return 0;
3679 /* Convert VALUE to the type in which the comparisons are nominally done. */
3680 if (value != 0)
3681 value = (*converter) (nominal_type, value);
3683 /* Fail if this value is out of range for the actual type of the index
3684 (which may be narrower than NOMINAL_TYPE). */
3685 if (value != 0
3686 && (TREE_CONSTANT_OVERFLOW (value)
3687 || ! int_fits_type_p (value, index_type)))
3688 return 3;
3690 return add_case_node (value, value, label, duplicate, false);
3693 /* Like pushcase but this case applies to all values between VALUE1 and
3694 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
3695 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
3696 starts at VALUE1 and ends at the highest value of the index type.
3697 If both are NULL, this case applies to all values.
3699 The return value is the same as that of pushcase but there is one
3700 additional error code: 4 means the specified range was empty. */
3703 pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
3704 tree label, tree *duplicate)
3706 tree index_type;
3707 tree nominal_type;
3709 /* Fail if not inside a real case statement. */
3710 if (! (case_stack && case_stack->data.case_stmt.start))
3711 return 1;
3713 if (stack_block_stack
3714 && stack_block_stack->depth > case_stack->depth)
3715 return 5;
3717 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3718 nominal_type = case_stack->data.case_stmt.nominal_type;
3720 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3721 if (index_type == error_mark_node)
3722 return 0;
3724 /* Convert VALUEs to type in which the comparisons are nominally done
3725 and replace any unspecified value with the corresponding bound. */
3726 if (value1 == 0)
3727 value1 = TYPE_MIN_VALUE (index_type);
3728 if (value2 == 0)
3729 value2 = TYPE_MAX_VALUE (index_type);
3731 /* Fail if the range is empty. Do this before any conversion since
3732 we want to allow out-of-range empty ranges. */
3733 if (value2 != 0 && tree_int_cst_lt (value2, value1))
3734 return 4;
3736 /* If the max was unbounded, use the max of the nominal_type we are
3737 converting to. Do this after the < check above to suppress false
3738 positives. */
3739 if (value2 == 0)
3740 value2 = TYPE_MAX_VALUE (nominal_type);
3742 value1 = (*converter) (nominal_type, value1);
3743 value2 = (*converter) (nominal_type, value2);
3745 /* Fail if these values are out of range. */
3746 if (TREE_CONSTANT_OVERFLOW (value1)
3747 || ! int_fits_type_p (value1, index_type))
3748 return 3;
3750 if (TREE_CONSTANT_OVERFLOW (value2)
3751 || ! int_fits_type_p (value2, index_type))
3752 return 3;
3754 return add_case_node (value1, value2, label, duplicate, false);
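/* Illustrative note (not part of the original sources): pushcase_range
   is what a front end would use for GNU C case ranges, so `case 1 ... 5:'
   maps to pushcase_range (build_int_2 (1, 0), build_int_2 (5, 0), ...),
   while an ordinary `case 5:' goes through pushcase above.  */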
3757 /* Do the actual insertion of a case label for pushcase and pushcase_range
3758 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
3759 slowdown for large switch statements. */
3762 add_case_node (tree low, tree high, tree label, tree *duplicate,
3763 bool dont_expand_label)
3765 struct case_node *p, **q, *r;
3767 /* If there's no HIGH value, then this is not a case range; it's
3768 just a simple case label. But that's just a degenerate case
3769 range. */
3770 if (!high)
3771 high = low;
3773 /* Handle default labels specially. */
3774 if (!high && !low)
3776 if (case_stack->data.case_stmt.default_label != 0)
3778 *duplicate = case_stack->data.case_stmt.default_label;
3779 return 2;
3781 case_stack->data.case_stmt.default_label = label;
3782 if (!dont_expand_label)
3783 expand_label (label);
3784 return 0;
3787 q = &case_stack->data.case_stmt.case_list;
3788 p = *q;
3790 while ((r = *q))
3792 p = r;
3794 /* Keep going past elements distinctly greater than HIGH. */
3795 if (tree_int_cst_lt (high, p->low))
3796 q = &p->left;
3798 /* or distinctly less than LOW. */
3799 else if (tree_int_cst_lt (p->high, low))
3800 q = &p->right;
3802 else
3804 /* We have an overlap; this is an error. */
3805 *duplicate = p->code_label;
3806 return 2;
3810 /* Add this label to the chain, and succeed. */
3812 r = ggc_alloc (sizeof (struct case_node));
3813 r->low = low;
3815 /* If the bounds are equal, turn this into the one-value case. */
3816 if (tree_int_cst_equal (low, high))
3817 r->high = r->low;
3818 else
3819 r->high = high;
3821 r->code_label = label;
3822 if (!dont_expand_label)
3823 expand_label (label);
3825 *q = r;
3826 r->parent = p;
3827 r->left = 0;
3828 r->right = 0;
3829 r->balance = 0;
3831 while (p)
3833 struct case_node *s;
3835 if (r == p->left)
3837 int b;
3839 if (! (b = p->balance))
3840 /* Growth propagation from left side. */
3841 p->balance = -1;
3842 else if (b < 0)
3844 if (r->balance < 0)
3846 /* R-Rotation */
3847 if ((p->left = s = r->right))
3848 s->parent = p;
3850 r->right = p;
3851 p->balance = 0;
3852 r->balance = 0;
3853 s = p->parent;
3854 p->parent = r;
3856 if ((r->parent = s))
3858 if (s->left == p)
3859 s->left = r;
3860 else
3861 s->right = r;
3863 else
3864 case_stack->data.case_stmt.case_list = r;
3866 else
3867 /* r->balance == +1 */
3869 /* LR-Rotation */
3871 int b2;
3872 struct case_node *t = r->right;
3874 if ((p->left = s = t->right))
3875 s->parent = p;
3877 t->right = p;
3878 if ((r->right = s = t->left))
3879 s->parent = r;
3881 t->left = r;
3882 b = t->balance;
3883 b2 = b < 0;
3884 p->balance = b2;
3885 b2 = -b2 - b;
3886 r->balance = b2;
3887 t->balance = 0;
3888 s = p->parent;
3889 p->parent = t;
3890 r->parent = t;
3892 if ((t->parent = s))
3894 if (s->left == p)
3895 s->left = t;
3896 else
3897 s->right = t;
3899 else
3900 case_stack->data.case_stmt.case_list = t;
3902 break;
3905 else
3907 /* p->balance == +1; growth of left side balances the node. */
3908 p->balance = 0;
3909 break;
3912 else
3913 /* r == p->right */
3915 int b;
3917 if (! (b = p->balance))
3918 /* Growth propagation from right side. */
3919 p->balance++;
3920 else if (b > 0)
3922 if (r->balance > 0)
3924 /* L-Rotation */
3926 if ((p->right = s = r->left))
3927 s->parent = p;
3929 r->left = p;
3930 p->balance = 0;
3931 r->balance = 0;
3932 s = p->parent;
3933 p->parent = r;
3934 if ((r->parent = s))
3936 if (s->left == p)
3937 s->left = r;
3938 else
3939 s->right = r;
3942 else
3943 case_stack->data.case_stmt.case_list = r;
3946 else
3947 /* r->balance == -1 */
3949 /* RL-Rotation */
3950 int b2;
3951 struct case_node *t = r->left;
3953 if ((p->right = s = t->left))
3954 s->parent = p;
3956 t->left = p;
3958 if ((r->left = s = t->right))
3959 s->parent = r;
3961 t->right = r;
3962 b = t->balance;
3963 b2 = b < 0;
3964 r->balance = b2;
3965 b2 = -b2 - b;
3966 p->balance = b2;
3967 t->balance = 0;
3968 s = p->parent;
3969 p->parent = t;
3970 r->parent = t;
3972 if ((t->parent = s))
3974 if (s->left == p)
3975 s->left = t;
3976 else
3977 s->right = t;
3980 else
3981 case_stack->data.case_stmt.case_list = t;
3983 break;
3985 else
3987 /* p->balance == -1; growth of right side balances the node. */
3988 p->balance = 0;
3989 break;
3993 r = p;
3994 p = p->parent;
3997 return 0;
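/* Illustrative note (not part of the original sources): the rebalancing
   above is ordinary AVL insertion.  Pushing the cases 1, 2 and 3 in that
   order, for example, makes the tree lean right; inserting 3 triggers the
   L-rotation, leaving 2 at the root with 1 and 3 as children, so lookups
   stay O(log n) even when cases arrive in sorted order.  */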
4000 /* Maximum number of case bit tests. */
4001 #define MAX_CASE_BIT_TESTS 3
4003 /* By default, enable case bit tests on targets with ashlsi3. */
4004 #ifndef CASE_USE_BIT_TESTS
4005 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
4006 != CODE_FOR_nothing)
4007 #endif
4010 /* A case_bit_test represents a set of case nodes that may be
4011 selected from using a bit-wise comparison. HI and LO hold
4012 the integer to be tested against, LABEL contains the label
4013 to jump to upon success and BITS counts the number of case
4014 nodes handled by this test, typically the number of bits
4015 set in HI:LO. */
4017 struct case_bit_test
4019 HOST_WIDE_INT hi;
4020 HOST_WIDE_INT lo;
4021 rtx label;
4022 int bits;
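/* Illustrative sketch (not part of the original sources): for a switch
   whose cases 4, 6 and 9 all branch to the same label L, MINVAL is 4 and
   RANGE is 5, and emit_case_bit_tests below emits code equivalent to

     t = x - 4;
     if ((unsigned) t > 5)
       goto default_label;
     if (((HOST_WIDE_INT) 1 << t) & 0x25)
       goto L;
     goto default_label;

   where 0x25 has bits 0, 2 and 5 set, one per case value minus MINVAL;
   i.e. one range check plus one AND per distinct target label.  */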
4025 /* Determine whether "1 << x" is relatively cheap in word_mode. */
4027 static bool
4028 lshift_cheap_p (void)
4030 static bool init = false;
4031 static bool cheap = true;
4033 if (!init)
4035 rtx reg = gen_rtx_REG (word_mode, 10000);
4036 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
4037 cheap = cost < COSTS_N_INSNS (3);
4038 init = true;
4041 return cheap;
4044 /* Comparison function for qsort to order bit tests by decreasing
4045 number of case nodes, i.e. the node with the most cases gets
4046 tested first. */
4048 static int
4049 case_bit_test_cmp (const void *p1, const void *p2)
4051 const struct case_bit_test *d1 = p1;
4052 const struct case_bit_test *d2 = p2;
4054 return d2->bits - d1->bits;
4057 /* Expand a switch statement by a short sequence of bit-wise
4058 comparisons. "switch(x)" is effectively converted into
4059 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
4060 integer constants.
4062 INDEX_EXPR is the value being switched on, which is of
4063 type INDEX_TYPE. MINVAL is the lowest case value in
4064 the case nodes, of INDEX_TYPE type, and RANGE is the highest
4065 value minus MINVAL, also of type INDEX_TYPE. NODES is
4066 the set of case nodes, and DEFAULT_LABEL is the label to
4067 branch to should none of the cases match.
4069 There *MUST* be MAX_CASE_BIT_TESTS or fewer unique case
4070 node targets. */
4072 static void
4073 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
4074 tree range, case_node_ptr nodes, rtx default_label)
4076 struct case_bit_test test[MAX_CASE_BIT_TESTS];
4077 enum machine_mode mode;
4078 rtx expr, index, label;
4079 unsigned int i, j, lo, hi;
4080 struct case_node *n;
4081 unsigned int count;
4083 count = 0;
4084 for (n = nodes; n; n = n->right)
4086 label = label_rtx (n->code_label);
4087 for (i = 0; i < count; i++)
4088 if (same_case_target_p (label, test[i].label))
4089 break;
4091 if (i == count)
4093 if (count >= MAX_CASE_BIT_TESTS)
4094 abort ();
4095 test[i].hi = 0;
4096 test[i].lo = 0;
4097 test[i].label = label;
4098 test[i].bits = 1;
4099 count++;
4101 else
4102 test[i].bits++;
4104 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4105 n->low, minval)), 1);
4106 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4107 n->high, minval)), 1);
4108 for (j = lo; j <= hi; j++)
4109 if (j >= HOST_BITS_PER_WIDE_INT)
4110 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
4111 else
4112 test[i].lo |= (HOST_WIDE_INT) 1 << j;
4115 qsort (test, count, sizeof(*test), case_bit_test_cmp);
4117 index_expr = fold (build (MINUS_EXPR, index_type,
4118 convert (index_type, index_expr),
4119 convert (index_type, minval)));
4120 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4121 emit_queue ();
4122 index = protect_from_queue (index, 0);
4123 do_pending_stack_adjust ();
4125 mode = TYPE_MODE (index_type);
4126 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
4127 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
4128 default_label);
4130 index = convert_to_mode (word_mode, index, 0);
4131 index = expand_binop (word_mode, ashl_optab, const1_rtx,
4132 index, NULL_RTX, 1, OPTAB_WIDEN);
4134 for (i = 0; i < count; i++)
4136 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
4137 expr = expand_binop (word_mode, and_optab, index, expr,
4138 NULL_RTX, 1, OPTAB_WIDEN);
4139 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
4140 word_mode, 1, test[i].label);
4143 emit_jump (default_label);
4146 #ifndef HAVE_casesi
4147 #define HAVE_casesi 0
4148 #endif
4150 #ifndef HAVE_tablejump
4151 #define HAVE_tablejump 0
4152 #endif
4154 /* Terminate a case (Pascal) or switch (C) statement
4155 in which ORIG_INDEX is the expression to be tested.
4156 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
4157 type as given in the source before any compiler conversions.
4158 Generate the code to test it and jump to the right place. */
4160 void
4161 expand_end_case_type (tree orig_index, tree orig_type)
4163 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
4164 rtx default_label = 0;
4165 struct case_node *n, *m;
4166 unsigned int count, uniq;
4167 rtx index;
4168 rtx table_label;
4169 int ncases;
4170 rtx *labelvec;
4171 int i;
4172 rtx before_case, end, lab;
4173 struct nesting *thiscase = case_stack;
4174 tree index_expr, index_type;
4175 bool exit_done = false;
4176 int unsignedp;
4178 /* Don't crash due to previous errors. */
4179 if (thiscase == NULL)
4180 return;
4182 index_expr = thiscase->data.case_stmt.index_expr;
4183 index_type = TREE_TYPE (index_expr);
4184 unsignedp = TYPE_UNSIGNED (index_type);
4185 if (orig_type == NULL)
4186 orig_type = TREE_TYPE (orig_index);
4188 do_pending_stack_adjust ();
4190 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4191 if (index_type != error_mark_node)
4193 /* If we don't have a default-label, create one here,
4194 after the body of the switch. */
4195 if (thiscase->data.case_stmt.default_label == 0)
4197 thiscase->data.case_stmt.default_label
4198 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4199 /* Share the exit label if possible. */
4200 if (thiscase->exit_label)
4202 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
4203 thiscase->exit_label);
4204 exit_done = true;
4206 expand_label (thiscase->data.case_stmt.default_label);
4208 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4210 before_case = get_last_insn ();
4212 if (thiscase->data.case_stmt.case_list
4213 && thiscase->data.case_stmt.case_list->left)
4214 thiscase->data.case_stmt.case_list
4215 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
4217 /* Simplify the case-list before we count it. */
4218 group_case_nodes (thiscase->data.case_stmt.case_list);
4219 strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
4220 default_label);
4222 /* Get upper and lower bounds of case values.
4223 Also convert all the case values to the index expr's data type. */
4225 uniq = 0;
4226 count = 0;
4227 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4229 /* Check low and high label values are integers. */
4230 if (TREE_CODE (n->low) != INTEGER_CST)
4231 abort ();
4232 if (TREE_CODE (n->high) != INTEGER_CST)
4233 abort ();
4235 n->low = convert (index_type, n->low);
4236 n->high = convert (index_type, n->high);
4238 /* Count the elements and track the largest and smallest
4239 of them (treating them as signed even if they are not). */
4240 if (count++ == 0)
4242 minval = n->low;
4243 maxval = n->high;
4245 else
4247 if (INT_CST_LT (n->low, minval))
4248 minval = n->low;
4249 if (INT_CST_LT (maxval, n->high))
4250 maxval = n->high;
4252 /* A range counts double, since it requires two compares. */
4253 if (! tree_int_cst_equal (n->low, n->high))
4254 count++;
4256 /* Count the number of unique case node targets. */
4257 uniq++;
4258 lab = label_rtx (n->code_label);
4259 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
4260 if (same_case_target_p (label_rtx (m->code_label), lab))
4262 uniq--;
4263 break;
4267 /* Compute span of values. */
4268 if (count != 0)
4269 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
4271 end_cleanup_deferral ();
4273 if (count == 0)
4275 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
4276 emit_queue ();
4277 emit_jump (default_label);
4280 /* Try implementing this switch statement by a short sequence of
4281 bit-wise comparisons. However, we let the binary-tree case
4282 below handle constant index expressions. */
4283 else if (CASE_USE_BIT_TESTS
4284 && ! TREE_CONSTANT (index_expr)
4285 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
4286 && compare_tree_int (range, 0) > 0
4287 && lshift_cheap_p ()
4288 && ((uniq == 1 && count >= 3)
4289 || (uniq == 2 && count >= 5)
4290 || (uniq == 3 && count >= 6)))
4292 /* Optimize the case where all the case values fit in a
4293 word without having to subtract MINVAL. In this case,
4294 we can optimize away the subtraction. */
4295 if (compare_tree_int (minval, 0) > 0
4296 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
4298 minval = integer_zero_node;
4299 range = maxval;
4301 emit_case_bit_tests (index_type, index_expr, minval, range,
4302 thiscase->data.case_stmt.case_list,
4303 default_label);
4306 /* If the range of values is much bigger than the number of values,
4307 make a sequence of conditional branches instead of a dispatch.
4308 If the switch-index is a constant, do it this way
4309 because we can optimize it. */
4311 else if (count < case_values_threshold ()
4312 || compare_tree_int (range,
4313 (optimize_size ? 3 : 10) * count) > 0
4314 /* RANGE may be signed, and really large ranges will show up
4315 as negative numbers. */
4316 || compare_tree_int (range, 0) < 0
4317 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
4318 || flag_pic
4319 #endif
4320 || TREE_CONSTANT (index_expr)
4321 /* If neither casesi or tablejump is available, we can
4322 only go this way. */
4323 || (!HAVE_casesi && !HAVE_tablejump))
4325 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4327 /* If the index is a short or char for which we do not have
4328 an insn to handle comparisons directly, convert it to
4329 a full integer now, rather than letting each comparison
4330 generate the conversion. */
4332 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
4333 && ! have_insn_for (COMPARE, GET_MODE (index)))
4335 enum machine_mode wider_mode;
4336 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
4337 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4338 if (have_insn_for (COMPARE, wider_mode))
4340 index = convert_to_mode (wider_mode, index, unsignedp);
4341 break;
4345 emit_queue ();
4346 do_pending_stack_adjust ();
4348 index = protect_from_queue (index, 0);
4349 if (MEM_P (index))
4350 index = copy_to_reg (index);
4351 if (GET_CODE (index) == CONST_INT
4352 || TREE_CODE (index_expr) == INTEGER_CST)
4354 /* Make a tree node with the proper constant value
4355 if we don't already have one. */
4356 if (TREE_CODE (index_expr) != INTEGER_CST)
4358 index_expr
4359 = build_int_2 (INTVAL (index),
4360 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
4361 index_expr = convert (index_type, index_expr);
4364 /* For constant index expressions we need only
4365 issue an unconditional branch to the appropriate
4366 target code. The job of removing any unreachable
4367 code is left to the optimization phase if the
4368 "-O" option is specified. */
4369 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4370 if (! tree_int_cst_lt (index_expr, n->low)
4371 && ! tree_int_cst_lt (n->high, index_expr))
4372 break;
4374 if (n)
4375 emit_jump (label_rtx (n->code_label));
4376 else
4377 emit_jump (default_label);
4379 else
4381 /* If the index expression is not constant we generate
4382 a binary decision tree to select the appropriate
4383 target code. This is done as follows:
4385 The list of cases is rearranged into a binary tree,
4386 nearly optimal assuming equal probability for each case.
4388 The tree is transformed into RTL, eliminating
4389 redundant test conditions at the same time.
4391 If program flow could reach the end of the
4392 decision tree an unconditional jump to the
4393 default code is emitted. */
4395 use_cost_table
4396 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
4397 && estimate_case_costs (thiscase->data.case_stmt.case_list));
4398 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
4399 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
4400 default_label, index_type);
4401 emit_jump_if_reachable (default_label);
4404 else
4406 table_label = gen_label_rtx ();
4407 if (! try_casesi (index_type, index_expr, minval, range,
4408 table_label, default_label))
4410 index_type = thiscase->data.case_stmt.nominal_type;
4412 /* Index jumptables from zero for suitable values of
4413 minval to avoid a subtraction. */
4414 if (! optimize_size
4415 && compare_tree_int (minval, 0) > 0
4416 && compare_tree_int (minval, 3) < 0)
4418 minval = integer_zero_node;
4419 range = maxval;
4422 if (! try_tablejump (index_type, index_expr, minval, range,
4423 table_label, default_label))
4424 abort ();
4427 /* Get table of labels to jump to, in order of case index. */
4429 ncases = tree_low_cst (range, 0) + 1;
4430 labelvec = alloca (ncases * sizeof (rtx));
4431 memset (labelvec, 0, ncases * sizeof (rtx));
4433 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4435 /* Compute the low and high bounds relative to the minimum
4436 value since that should fit in a HOST_WIDE_INT while the
4437 actual values may not. */
4438 HOST_WIDE_INT i_low
4439 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4440 n->low, minval)), 1);
4441 HOST_WIDE_INT i_high
4442 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4443 n->high, minval)), 1);
4444 HOST_WIDE_INT i;
4446 for (i = i_low; i <= i_high; i ++)
4447 labelvec[i]
4448 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
4451 /* Fill in the gaps with the default. */
4452 for (i = 0; i < ncases; i++)
4453 if (labelvec[i] == 0)
4454 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
4456 /* Output the table. */
4457 emit_label (table_label);
4459 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
4460 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
4461 gen_rtx_LABEL_REF (Pmode, table_label),
4462 gen_rtvec_v (ncases, labelvec),
4463 const0_rtx, const0_rtx));
4464 else
4465 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
4466 gen_rtvec_v (ncases, labelvec)));
4468 /* If the case insn drops through the table,
4469 after the table we must jump to the default-label.
4470 Otherwise record no drop-through after the table. */
4471 #ifdef CASE_DROPS_THROUGH
4472 emit_jump (default_label);
4473 #else
4474 emit_barrier ();
4475 #endif
4478 before_case = NEXT_INSN (before_case);
4479 end = get_last_insn ();
4480 if (squeeze_notes (&before_case, &end))
4481 abort ();
4482 reorder_insns (before_case, end,
4483 thiscase->data.case_stmt.start);
4485 else
4486 end_cleanup_deferral ();
4488 if (thiscase->exit_label && !exit_done)
4489 emit_label (thiscase->exit_label);
4491 POPSTACK (case_stack);
4493 free_temp_slots ();
4496 /* Convert the tree NODE into a list linked by the right field, with the left
4497 field zeroed. RIGHT is used for recursion; it is a list to be placed
4498 rightmost in the resulting list. */
4500 static struct case_node *
4501 case_tree2list (struct case_node *node, struct case_node *right)
4503 struct case_node *left;
4505 if (node->right)
4506 right = case_tree2list (node->right, right);
4508 node->right = right;
4509 if ((left = node->left))
4511 node->left = 0;
4512 return case_tree2list (left, node);
4515 return node;
4518 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
4520 static void
4521 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
4523 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
4525 if (op1 == op2)
4526 emit_jump (label);
4528 else
4529 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
4530 (GET_MODE (op1) == VOIDmode
4531 ? GET_MODE (op2) : GET_MODE (op1)),
4532 unsignedp, label);
4535 /* Not all case values are encountered equally. This function
4536 uses a heuristic to weight case labels, in cases where that
4537 looks like a reasonable thing to do.
4539 Right now, all we try to guess is text, and we establish the
4540 following weights:
4542 chars above space: 16
4543 digits: 16
4544 default: 12
4545 space, punct: 8
4546 tab: 4
4547 newline: 2
4548 other "\" chars: 1
4549 remaining chars: 0
4551 If we find any cases in the switch that are not either -1 or in the range
4552 of valid ASCII characters, or are control characters other than those
4553 commonly used with "\", don't treat this switch as scanning text.
4555 Return 1 if these nodes are suitable for cost estimation, otherwise
4556 return 0. */
4558 static int
4559 estimate_case_costs (case_node_ptr node)
4561 tree min_ascii = integer_minus_one_node;
4562 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
4563 case_node_ptr n;
4564 int i;
4566 /* If we haven't already made the cost table, make it now. Note that the
4567 lower bound of the table is -1, not zero. */
4569 if (! cost_table_initialized)
4571 cost_table_initialized = 1;
4573 for (i = 0; i < 128; i++)
4575 if (ISALNUM (i))
4576 COST_TABLE (i) = 16;
4577 else if (ISPUNCT (i))
4578 COST_TABLE (i) = 8;
4579 else if (ISCNTRL (i))
4580 COST_TABLE (i) = -1;
4583 COST_TABLE (' ') = 8;
4584 COST_TABLE ('\t') = 4;
4585 COST_TABLE ('\0') = 4;
4586 COST_TABLE ('\n') = 2;
4587 COST_TABLE ('\f') = 1;
4588 COST_TABLE ('\v') = 1;
4589 COST_TABLE ('\b') = 1;
4592 /* See if all the case expressions look like text. It is text if the
4593 constant is >= -1 and the highest constant is <= 127. Do all comparisons
4594 as signed arithmetic since we don't want to ever access cost_table with a
4595 value less than -1. Also check that none of the constants in a range
4596 are strange control characters. */
4598 for (n = node; n; n = n->right)
4600 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
4601 return 0;
4603 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
4604 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
4605 if (COST_TABLE (i) < 0)
4606 return 0;
4609 /* All interesting values are within the range of interesting
4610 ASCII characters. */
4611 return 1;
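/* Illustrative note (not part of the original sources): when this
   returns 1, the weights feed balance_case_nodes below, which splits
   the case list where roughly half of the total weight has been seen,
   so frequent text characters (letters and digits, weight 16) tend to
   sit nearer the root of the decision tree than rare ones such as
   '\f' (weight 1).  */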
4614 /* Determine whether two case labels branch to the same target. */
4616 static bool
4617 same_case_target_p (rtx l1, rtx l2)
4619 #if 0
4620 rtx i1, i2;
4622 if (l1 == l2)
4623 return true;
4625 i1 = next_real_insn (l1);
4626 i2 = next_real_insn (l2);
4627 if (i1 == i2)
4628 return true;
4630 if (i1 && simplejump_p (i1))
4632 l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
4635 if (i2 && simplejump_p (i2))
4637 l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
4639 #endif
4640 /* When coming from gimple, we usually won't have emitted either
4641 the labels or the body of the switch statement. The job being
4642 done here should be done via jump threading at the tree level.
4643 Cases that go the same place should have the same label. */
4644 return l1 == l2;
4647 /* Delete nodes that branch to the default label from a list of
4648 case nodes. Eg. case 5: default: becomes just default: */
4650 static void
4651 strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
4653 case_node_ptr ptr;
4655 while (*prev)
4657 ptr = *prev;
4658 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
4659 *prev = ptr->right;
4660 else
4661 prev = &ptr->right;
4665 /* Scan an ordered list of case nodes
4666 combining those with consecutive values or ranges.
4668 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
4670 static void
4671 group_case_nodes (case_node_ptr head)
4673 case_node_ptr node = head;
4675 while (node)
4677 rtx lab;
4678 case_node_ptr np = node;
4680 lab = label_rtx (node->code_label);
4682 /* Try to group the successors of NODE with NODE. */
4683 while (((np = np->right) != 0)
4684 /* Do they jump to the same place? */
4685 && same_case_target_p (label_rtx (np->code_label), lab)
4686 /* Are their ranges consecutive? */
4687 && tree_int_cst_equal (np->low,
4688 fold (build (PLUS_EXPR,
4689 TREE_TYPE (node->high),
4690 node->high,
4691 integer_one_node)))
4692 /* An overflow is not consecutive. */
4693 && tree_int_cst_lt (node->high,
4694 fold (build (PLUS_EXPR,
4695 TREE_TYPE (node->high),
4696 node->high,
4697 integer_one_node))))
4699 node->high = np->high;
4701 /* NP is the first node after NODE which can't be grouped with it.
4702 Delete the nodes in between, and move on to that node. */
4703 node->right = np;
4704 node = np;
4708 /* Take an ordered list of case nodes
4709 and transform them into a near optimal binary tree,
4710 on the assumption that any target code selection value is as
4711 likely as any other.
4713 The transformation is performed by splitting the ordered
4714 list into two equal sections plus a pivot. The parts are
4715 then attached to the pivot as left and right branches. Each
4716 branch is then transformed recursively. */
4718 static void
4719 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
4721 case_node_ptr np;
4723 np = *head;
4724 if (np)
4726 int cost = 0;
4727 int i = 0;
4728 int ranges = 0;
4729 case_node_ptr *npp;
4730 case_node_ptr left;
4732 /* Count the number of entries on branch. Also count the ranges. */
4734 while (np)
4736 if (!tree_int_cst_equal (np->low, np->high))
4738 ranges++;
4739 if (use_cost_table)
4740 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
4743 if (use_cost_table)
4744 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
4746 i++;
4747 np = np->right;
4750 if (i > 2)
4752 /* Split this list if it is long enough for that to help. */
4753 npp = head;
4754 left = *npp;
4755 if (use_cost_table)
4757 /* Find the place in the list that bisects the list's total cost.
4758 Here I gets half the total cost. */
4759 int n_moved = 0;
4760 i = (cost + 1) / 2;
4761 while (1)
4763 /* Skip nodes while their cost does not reach that amount. */
4764 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4765 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
4766 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
4767 if (i <= 0)
4768 break;
4769 npp = &(*npp)->right;
4770 n_moved += 1;
4772 if (n_moved == 0)
4774 /* Leave this branch lopsided, but optimize left-hand
4775 side and fill in `parent' fields for right-hand side. */
4776 np = *head;
4777 np->parent = parent;
4778 balance_case_nodes (&np->left, np);
4779 for (; np->right; np = np->right)
4780 np->right->parent = np;
4781 return;
4784 /* If there are just three nodes, split at the middle one. */
4785 else if (i == 3)
4786 npp = &(*npp)->right;
4787 else
4789 /* Find the place in the list that bisects the list's total cost,
4790 where ranges count as 2.
4791 Here I gets half the total cost. */
4792 i = (i + ranges + 1) / 2;
4793 while (1)
4795 /* Skip nodes while their cost does not reach that amount. */
4796 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4797 i--;
4798 i--;
4799 if (i <= 0)
4800 break;
4801 npp = &(*npp)->right;
4804 *head = np = *npp;
4805 *npp = 0;
4806 np->parent = parent;
4807 np->left = left;
4809 /* Optimize each of the two split parts. */
4810 balance_case_nodes (&np->left, np);
4811 balance_case_nodes (&np->right, np);
4813 else
4815 /* Else leave this branch as one level,
4816 but fill in `parent' fields. */
4817 np = *head;
4818 np->parent = parent;
4819 for (; np->right; np = np->right)
4820 np->right->parent = np;
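/* Illustrative note (not part of the original sources): without the
   cost table, seven equally weighted single-value cases 1..7 are split
   at the midpoint, so 4 becomes the pivot; recursing on both halves
   then yields

          4
        /   \
       2     6
      / \   / \
     1   3 5   7

   a three-level decision tree instead of a seven-entry linear chain.  */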
4825 /* Search the parent sections of the case node tree
4826 to see if a test for the lower bound of NODE would be redundant.
4827 INDEX_TYPE is the type of the index expression.
4829 The instructions to generate the case decision tree are
4830 output in the same order as nodes are processed so it is
4831 known that if a parent node checks the range of the current
4832 node minus one, then the current node is bounded at its lower
4833 span. Thus the test would be redundant. */
4835 static int
4836 node_has_low_bound (case_node_ptr node, tree index_type)
4838 tree low_minus_one;
4839 case_node_ptr pnode;
4841 /* If the lower bound of this node is the lowest value in the index type,
4842 we need not test it. */
4844 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
4845 return 1;
4847 /* If this node has a left branch, the value at the left must be less
4848 than that at this node, so it cannot be bounded at the bottom and
4849 we need not bother testing any further. */
4851 if (node->left)
4852 return 0;
4854 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
4855 node->low, integer_one_node));
4857 /* If the subtraction above overflowed, we can't verify anything.
4858 Otherwise, look for a parent that tests our value - 1. */
4860 if (! tree_int_cst_lt (low_minus_one, node->low))
4861 return 0;
4863 for (pnode = node->parent; pnode; pnode = pnode->parent)
4864 if (tree_int_cst_equal (low_minus_one, pnode->high))
4865 return 1;
4867 return 0;
4870 /* Search the parent sections of the case node tree
4871 to see if a test for the upper bound of NODE would be redundant.
4872 INDEX_TYPE is the type of the index expression.
4874 The instructions to generate the case decision tree are
4875 output in the same order as nodes are processed so it is
4876 known that if a parent node checks the range of the current
4877 node plus one, then the current node is bounded at its upper
4878 span. Thus the test would be redundant. */
4880 static int
4881 node_has_high_bound (case_node_ptr node, tree index_type)
4883 tree high_plus_one;
4884 case_node_ptr pnode;
4886 /* If there is no upper bound, obviously no test is needed. */
4888 if (TYPE_MAX_VALUE (index_type) == NULL)
4889 return 1;
4891 /* If the upper bound of this node is the highest value in the type
4892 of the index expression, we need not test against it. */
4894 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
4895 return 1;
4897 /* If this node has a right branch, the value at the right must be greater
4898 than that at this node, so it cannot be bounded at the top and
4899 we need not bother testing any further. */
4901 if (node->right)
4902 return 0;
4904 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
4905 node->high, integer_one_node));
4907 /* If the addition above overflowed, we can't verify anything.
4908 Otherwise, look for a parent that tests our value + 1. */
4910 if (! tree_int_cst_lt (node->high, high_plus_one))
4911 return 0;
4913 for (pnode = node->parent; pnode; pnode = pnode->parent)
4914 if (tree_int_cst_equal (high_plus_one, pnode->low))
4915 return 1;
4917 return 0;
4920 /* Search the parent sections of the
4921 case node tree to see if both tests for the upper and lower
4922 bounds of NODE would be redundant. */
4924 static int
4925 node_is_bounded (case_node_ptr node, tree index_type)
4927 return (node_has_low_bound (node, index_type)
4928 && node_has_high_bound (node, index_type));
4931 /* Emit an unconditional jump to LABEL unless it would be dead code. */
4933 static void
4934 emit_jump_if_reachable (rtx label)
4936 if (GET_CODE (get_last_insn ()) != BARRIER)
4937 emit_jump (label);
4940 /* Emit step-by-step code to select a case for the value of INDEX.
4941 The thus generated decision tree follows the form of the
4942 case-node binary tree NODE, whose nodes represent test conditions.
4943 INDEX_TYPE is the type of the index of the switch.
4945 Care is taken to prune redundant tests from the decision tree
4946 by detecting any boundary conditions already checked by
4947 emitted rtx. (See node_has_high_bound, node_has_low_bound
4948 and node_is_bounded, above.)
4950 Where the test conditions can be shown to be redundant we emit
4951 an unconditional jump to the target code. As a further
4952 optimization, the subordinates of a tree node are examined to
4953 check for bounded nodes. In this case conditional and/or
4954 unconditional jumps as a result of the boundary check for the
4955 current node are arranged to target the subordinate's associated
4956 code for out-of-bound conditions on the current node.
4958 We can assume that when control reaches the code generated here,
4959 the index value has already been compared with the parents
4960 of this node, and determined to be on the same side of each parent
4961 as this node is. Thus, if this node tests for the value 51,
4962 and a parent tested for 52, we don't need to consider
4963 the possibility of a value greater than 51. If another parent
4964 tests for the value 50, then this node need not test anything. */
4966 static void
4967 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
4968 tree index_type)
4970 /* If INDEX has an unsigned type, we must make unsigned branches. */
4971 int unsignedp = TYPE_UNSIGNED (index_type);
4972 enum machine_mode mode = GET_MODE (index);
4973 enum machine_mode imode = TYPE_MODE (index_type);
4975 /* See if our parents have already tested everything for us.
4976 If they have, emit an unconditional jump for this node. */
4977 if (node_is_bounded (node, index_type))
4978 emit_jump (label_rtx (node->code_label));
4980 else if (tree_int_cst_equal (node->low, node->high))
4982 /* Node is single valued. First see if the index expression matches
4983 this node and then check our children, if any. */
4985 do_jump_if_equal (index,
4986 convert_modes (mode, imode,
4987 expand_expr (node->low, NULL_RTX,
4988 VOIDmode, 0),
4989 unsignedp),
4990 label_rtx (node->code_label), unsignedp);
4992 if (node->right != 0 && node->left != 0)
4994 /* This node has children on both sides.
4995 Dispatch to one side or the other
4996 by comparing the index value with this node's value.
4997 If one subtree is bounded, check that one first,
4998 so we can avoid real branches in the tree. */
5000 if (node_is_bounded (node->right, index_type))
5002 emit_cmp_and_jump_insns (index,
5003 convert_modes
5004 (mode, imode,
5005 expand_expr (node->high, NULL_RTX,
5006 VOIDmode, 0),
5007 unsignedp),
5008 GT, NULL_RTX, mode, unsignedp,
5009 label_rtx (node->right->code_label));
5010 emit_case_nodes (index, node->left, default_label, index_type);
5013 else if (node_is_bounded (node->left, index_type))
5015 emit_cmp_and_jump_insns (index,
5016 convert_modes
5017 (mode, imode,
5018 expand_expr (node->high, NULL_RTX,
5019 VOIDmode, 0),
5020 unsignedp),
5021 LT, NULL_RTX, mode, unsignedp,
5022 label_rtx (node->left->code_label));
5023 emit_case_nodes (index, node->right, default_label, index_type);
5026 /* If both children are single-valued cases with no
5027 children, finish up all the work. This way, we can save
5028 one ordered comparison. */
5029 else if (tree_int_cst_equal (node->right->low, node->right->high)
5030 && node->right->left == 0
5031 && node->right->right == 0
5032 && tree_int_cst_equal (node->left->low, node->left->high)
5033 && node->left->left == 0
5034 && node->left->right == 0)
5036 /* Neither node is bounded. First distinguish the two sides;
5037 then emit the code for one side at a time. */
5039 /* See if the value matches what the right hand side
5040 wants. */
5041 do_jump_if_equal (index,
5042 convert_modes (mode, imode,
5043 expand_expr (node->right->low,
5044 NULL_RTX,
5045 VOIDmode, 0),
5046 unsignedp),
5047 label_rtx (node->right->code_label),
5048 unsignedp);
5050 /* See if the value matches what the left hand side
5051 wants. */
5052 do_jump_if_equal (index,
5053 convert_modes (mode, imode,
5054 expand_expr (node->left->low,
5055 NULL_RTX,
5056 VOIDmode, 0),
5057 unsignedp),
5058 label_rtx (node->left->code_label),
5059 unsignedp);
5062 else
5064 /* Neither node is bounded. First distinguish the two sides;
5065 then emit the code for one side at a time. */
5067 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5069 /* See if the value is on the right. */
5070 emit_cmp_and_jump_insns (index,
5071 convert_modes
5072 (mode, imode,
5073 expand_expr (node->high, NULL_RTX,
5074 VOIDmode, 0),
5075 unsignedp),
5076 GT, NULL_RTX, mode, unsignedp,
5077 label_rtx (test_label));
5079 /* Value must be on the left.
5080 Handle the left-hand subtree. */
5081 emit_case_nodes (index, node->left, default_label, index_type);
5082 /* If left-hand subtree does nothing,
5083 go to default. */
5084 emit_jump_if_reachable (default_label);
5086 /* Code branches here for the right-hand subtree. */
5087 expand_label (test_label);
5088 emit_case_nodes (index, node->right, default_label, index_type);
5092 else if (node->right != 0 && node->left == 0)
5094 /* Here we have a right child but no left so we issue a conditional
5095 branch to default and process the right child.
5097 Omit the conditional branch to default if it would avoid only one
5098 right child; it costs too much space to save so little time. */
5100 if (node->right->right || node->right->left
5101 || !tree_int_cst_equal (node->right->low, node->right->high))
5103 if (!node_has_low_bound (node, index_type))
5105 emit_cmp_and_jump_insns (index,
5106 convert_modes
5107 (mode, imode,
5108 expand_expr (node->high, NULL_RTX,
5109 VOIDmode, 0),
5110 unsignedp),
5111 LT, NULL_RTX, mode, unsignedp,
5112 default_label);
5115 emit_case_nodes (index, node->right, default_label, index_type);
5117 else
5118 /* We cannot process node->right normally
5119 since we haven't ruled out the numbers less than
5120 this node's value. So handle node->right explicitly. */
5121 do_jump_if_equal (index,
5122 convert_modes
5123 (mode, imode,
5124 expand_expr (node->right->low, NULL_RTX,
5125 VOIDmode, 0),
5126 unsignedp),
5127 label_rtx (node->right->code_label), unsignedp);
5130 else if (node->right == 0 && node->left != 0)
5132 /* Just one subtree, on the left. */
5133 if (node->left->left || node->left->right
5134 || !tree_int_cst_equal (node->left->low, node->left->high))
5136 if (!node_has_high_bound (node, index_type))
5138 emit_cmp_and_jump_insns (index,
5139 convert_modes
5140 (mode, imode,
5141 expand_expr (node->high, NULL_RTX,
5142 VOIDmode, 0),
5143 unsignedp),
5144 GT, NULL_RTX, mode, unsignedp,
5145 default_label);
5148 emit_case_nodes (index, node->left, default_label, index_type);
5150 else
5151 /* We cannot process node->left normally
5152 since we haven't ruled out the numbers greater than
5153 this node's value. So handle node->left explicitly. */
5154 do_jump_if_equal (index,
5155 convert_modes
5156 (mode, imode,
5157 expand_expr (node->left->low, NULL_RTX,
5158 VOIDmode, 0),
5159 unsignedp),
5160 label_rtx (node->left->code_label), unsignedp);
5163 else
5165 /* Node is a range. These cases are very similar to those for a single
5166 value, except that we do not start by testing whether this node
5167 is the one to branch to. */
5169 if (node->right != 0 && node->left != 0)
5171 /* Node has subtrees on both sides.
5172 If the right-hand subtree is bounded,
5173 test for it first, since we can go straight there.
5174 Otherwise, we need to make a branch in the control structure,
5175 then handle the two subtrees. */
5176 tree test_label = 0;
5178 if (node_is_bounded (node->right, index_type))
5179 /* Right hand node is fully bounded so we can eliminate any
5180 testing and branch directly to the target code. */
5181 emit_cmp_and_jump_insns (index,
5182 convert_modes
5183 (mode, imode,
5184 expand_expr (node->high, NULL_RTX,
5185 VOIDmode, 0),
5186 unsignedp),
5187 GT, NULL_RTX, mode, unsignedp,
5188 label_rtx (node->right->code_label));
5189 else
5191 /* Right hand node requires testing.
5192 Branch to a label where we will handle it later. */
5194 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5195 emit_cmp_and_jump_insns (index,
5196 convert_modes
5197 (mode, imode,
5198 expand_expr (node->high, NULL_RTX,
5199 VOIDmode, 0),
5200 unsignedp),
5201 GT, NULL_RTX, mode, unsignedp,
5202 label_rtx (test_label));
5205 /* Value belongs to this node or to the left-hand subtree. */
5207 emit_cmp_and_jump_insns (index,
5208 convert_modes
5209 (mode, imode,
5210 expand_expr (node->low, NULL_RTX,
5211 VOIDmode, 0),
5212 unsignedp),
5213 GE, NULL_RTX, mode, unsignedp,
5214 label_rtx (node->code_label));
5216 /* Handle the left-hand subtree. */
5217 emit_case_nodes (index, node->left, default_label, index_type);
5219 /* If right node had to be handled later, do that now. */
5221 if (test_label)
5223 /* If the left-hand subtree fell through,
5224 don't let it fall into the right-hand subtree. */
5225 emit_jump_if_reachable (default_label);
5227 expand_label (test_label);
5228 emit_case_nodes (index, node->right, default_label, index_type);
5232 else if (node->right != 0 && node->left == 0)
5234 /* Deal with values to the left of this node,
5235 if they are possible. */
5236 if (!node_has_low_bound (node, index_type))
5238 emit_cmp_and_jump_insns (index,
5239 convert_modes
5240 (mode, imode,
5241 expand_expr (node->low, NULL_RTX,
5242 VOIDmode, 0),
5243 unsignedp),
5244 LT, NULL_RTX, mode, unsignedp,
5245 default_label);
5248 /* Value belongs to this node or to the right-hand subtree. */
5250 emit_cmp_and_jump_insns (index,
5251 convert_modes
5252 (mode, imode,
5253 expand_expr (node->high, NULL_RTX,
5254 VOIDmode, 0),
5255 unsignedp),
5256 LE, NULL_RTX, mode, unsignedp,
5257 label_rtx (node->code_label));
5259 emit_case_nodes (index, node->right, default_label, index_type);
5262 else if (node->right == 0 && node->left != 0)
5264 /* Deal with values to the right of this node,
5265 if they are possible. */
5266 if (!node_has_high_bound (node, index_type))
5268 emit_cmp_and_jump_insns (index,
5269 convert_modes
5270 (mode, imode,
5271 expand_expr (node->high, NULL_RTX,
5272 VOIDmode, 0),
5273 unsignedp),
5274 GT, NULL_RTX, mode, unsignedp,
5275 default_label);
5278 /* Value belongs to this node or to the left-hand subtree. */
5280 emit_cmp_and_jump_insns (index,
5281 convert_modes
5282 (mode, imode,
5283 expand_expr (node->low, NULL_RTX,
5284 VOIDmode, 0),
5285 unsignedp),
5286 GE, NULL_RTX, mode, unsignedp,
5287 label_rtx (node->code_label));
5289 emit_case_nodes (index, node->left, default_label, index_type);
5292 else
5294 /* Node has no children so we check low and high bounds to remove
5295 redundant tests. Only one of the bounds can exist,
5296 since otherwise this node is bounded--a case tested already. */
5297 int high_bound = node_has_high_bound (node, index_type);
5298 int low_bound = node_has_low_bound (node, index_type);
5300 if (!high_bound && low_bound)
5302 emit_cmp_and_jump_insns (index,
5303 convert_modes
5304 (mode, imode,
5305 expand_expr (node->high, NULL_RTX,
5306 VOIDmode, 0),
5307 unsignedp),
5308 GT, NULL_RTX, mode, unsignedp,
5309 default_label);
5312 else if (!low_bound && high_bound)
5314 emit_cmp_and_jump_insns (index,
5315 convert_modes
5316 (mode, imode,
5317 expand_expr (node->low, NULL_RTX,
5318 VOIDmode, 0),
5319 unsignedp),
5320 LT, NULL_RTX, mode, unsignedp,
5321 default_label);
5323 else if (!low_bound && !high_bound)
5325 /* Widen LOW and HIGH to the same width as INDEX. */
5326 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
5327 tree low = build1 (CONVERT_EXPR, type, node->low);
5328 tree high = build1 (CONVERT_EXPR, type, node->high);
5329 rtx low_rtx, new_index, new_bound;
5331 /* Instead of doing two branches, emit one unsigned branch for
5332 (index-low) > (high-low). */
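/* Illustrative note (not part of the original sources): for a range
   label such as `case 3 ... 7:' this emits the single unsigned test
   (index - 3) > 4 instead of separate index < 3 and index > 7
   branches.  */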
5333 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
5334 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
5335 NULL_RTX, unsignedp,
5336 OPTAB_WIDEN);
5337 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
5338 high, low)),
5339 NULL_RTX, mode, 0);
5341 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
5342 mode, 1, default_label);
5345 emit_jump (label_rtx (node->code_label));
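/* Illustrative sketch (not part of the original sources): for
   `switch (x)' with single-value cases 1, 5 and 9 plus a default,
   balance_case_nodes puts 5 at the root and the routine above emits,
   in effect,

     if (x == 5) goto L5;
     if (x == 9) goto L9;
     if (x == 1) goto L1;

   with expand_end_case_type appending the final jump to the default
   label; since both children of the root are childless single-value
   nodes, only equality tests are needed and no ordered comparison is
   emitted.  */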
5350 #include "gt-stmt.h"