/* Expands front end tree to back end RTL for GCC
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"
#include "optabs.h"
#include "target.h"
#include "regs.h"
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
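/* For example (an illustrative sketch, not tied to any particular
   target): a dense switch such as

       switch (c) { case 0: ...  case 1: ...  case 2: ...  case 3: ... }

   would normally become a branch table indexed by C, while a sparse
   one such as `case 1', `case 1000', `case 100000' would become a
   balanced tree of compare-and-jump insns built from these
   case_node records.  */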
struct case_node GTY(())
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;                 /* Lowest index value for this label */
  tree high;                /* Highest index value for this label */
  tree code_label;          /* Label to jump to when node matches */
  int balance;              /* AVL balance factor */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
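/* For instance, COST_TABLE (-1) refers to cost_table_[0] and
   COST_TABLE (127) to cost_table_[128], so the values -1 through 127
   are all representable in the 129-element table (illustrative
   arithmetic only).  */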
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, conditionals have `cond_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
struct nesting GTY(())
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  enum nesting_desc {
    COND_NESTING,
    BLOCK_NESTING,
    CASE_NESTING
  } desc;
  union nesting_u
    {
      /* For conds (if-then and if-then-else statements).  */
      struct nesting_cond
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} GTY ((tag ("COND_NESTING"))) cond;
      /* For variable binding contours.  */
      struct nesting_block
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block, if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	} GTY ((tag ("BLOCK_NESTING"))) block;
      /* For switch (C) or case (Pascal) statements.  */
      struct nesting_case
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} GTY ((tag ("CASE_NESTING"))) case_stmt;
    } GTY ((desc ("%1.desc"))) data;
};
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all; }			\
     while (this != target); } while (0)
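/* Thus, for example, POPSTACK (case_stack) unwinds nesting levels up
   to and including the innermost pending case statement, keeping all
   four per-construct stacks, `nesting_depth' and `nesting_stack'
   consistent as it goes (an illustrative note on the macro above,
   not an additional interface).  */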
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
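/* A typical case needing a fixup (illustrative only):

       {
	 char buf[n];	// variable-sized: this contour saves a stack level
	 ...
	 goto out;	// label not yet seen at this point
       }
     out: ;

   The jump must restore the stack level of the contour it exits,
   which is only settled once the label has been expanded.  */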
struct goto_fixup GTY(())
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain GTY(())
{
  /* Points to the following label in the chain.  */
  struct label_chain *next;
  tree label;
};
struct stmt_status GTY(())
{
  /* Chain of all pending binding contours.  */
  struct nesting * x_block_stack;

  /* If any new stacks are added here, add them to POPSTACK too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting * x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting * x_cond_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting * x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting * x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Location of last line-number note, whether we actually
     emitted it or not.  */
  location_t x_emit_locus;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define emit_locus (cfun->stmt->x_emit_locus)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
/* Nonzero if we are using EH to handle cleanups.  */
int using_eh_for_cleanups_p = 0;

static int n_occurrences (int, const char *);
static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
static void expand_goto_internal (tree, rtx, rtx);
static int expand_fixup (tree, rtx, rtx);
static void expand_nl_goto_receiver (void);
static void fixup_gotos (struct nesting *, rtx, tree, rtx, int);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree);
static void expand_null_return_1 (rtx);
static enum br_predictor return_prediction (rtx);
static rtx shift_return_value (rtx);
static void expand_value_return (rtx);
static void expand_cleanups (tree, int, int);
static void do_jump_if_equal (rtx, rtx, rtx, int);
static int estimate_case_costs (case_node_ptr);
static bool same_case_target_p (rtx, rtx);
static void strip_default_case_nodes (case_node_ptr *, rtx);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void group_case_nodes (case_node_ptr);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_jump_if_reachable (rtx);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *case_tree2list (case_node *, case_node *);
void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt_for_function (void)
{
  cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
}

/* Record the current file and line.  Called from emit_line_note.  */

void
set_file_and_line_for_stmt (location_t location)
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    emit_locus = location;
}
/* Emit a no-op instruction.  */

void
emit_nop (void)
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (tree label)
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (!DECL_RTL_SET_P (label))
    {
      rtx r = gen_label_rtx ();
      SET_DECL_RTL (label, r);
      if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
	LABEL_PRESERVE_P (r) = 1;
    }

  return DECL_RTL (label);
}
/* As above, but also put it on the forced-reference list of the
   function that contains it.  */

rtx
force_label_rtx (tree label)
{
  rtx ref = label_rtx (label);
  tree function = decl_function_context (label);
  struct function *p;

  if (!function)
    abort ();

  if (function != current_function_decl)
    p = find_function_data (function);
  else
    p = cfun;

  p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
						p->expr->x_forced_labels);
  return ref;
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (tree exp)
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

  x = convert_memory_address (Pmode, x);

  emit_queue ();
  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (tree label)
{
  struct label_chain *p;
  rtx label_r = label_rtx (label);

  do_pending_stack_adjust ();
  emit_label (label_r);
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (DECL_NONLOCAL (label))
    {
      expand_nl_goto_receiver ();
      nonlocal_goto_handler_labels
	= gen_rtx_EXPR_LIST (VOIDmode, label_r,
			     nonlocal_goto_handler_labels);
    }

  if (FORCED_LABEL (label))
    forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);

  if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
    maybe_set_first_label_num (label_r);

  if (stack_block_stack != 0)
    {
      p = ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
#ifdef ENABLE_CHECKING
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    abort ();
#endif

  expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a CODE_LABEL rtx.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (tree body, rtx label, rtx last_insn)
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();

	  /* Don't do this adjust if it's to the end label and this function
	     is to return with a depressed stack pointer.  */
	  if (label == return_label
	      && (((TREE_CODE (TREE_TYPE (current_function_decl))
		    == FUNCTION_TYPE)
		   && (TYPE_RETURNS_STACK_DEPRESSED
		       (TREE_TYPE (current_function_decl))))))
	    ;
	  else
	    emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */
static int
expand_fixup (tree tree_label, rtx rtl_label, rtx last_insn)
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup = ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes might move the *_BLOCK notes away,
	 so we use a NOTE_INSN_DELETED as a placeholder.  */

      {
	rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	rtx end;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	BLOCK_CHAIN (block)
	  = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
	BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
	  = block;

	start_sequence ();
	start = emit_note (NOTE_INSN_BLOCK_BEG);
	NOTE_BLOCK (start) = block;
	fixup->before_jump = emit_note (NOTE_INSN_DELETED);
	end = emit_note (NOTE_INSN_BLOCK_END);
	NOTE_BLOCK (end) = block;
	fixup->context = block;
	end_sequence ();
	emit_insn_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (rtx first_insn)
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN positive means report error if there is a jump into this
   contour from before the beginning of the contour.  This is also done if
   STACK_LEVEL is nonzero unless DONT_JUMP_IN is negative.  */

static void
fixup_gotos (struct nesting *thisblock, rtx stack_level,
	     tree cleanup_list, rtx first_insn, int dont_jump_in)
{
  struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  rtx cleanup_insns;

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  This code used to use
	     the first non-label insn after f->target_rtl, but that's
	     wrong since such can be added, by things like put_var_into_stack
	     and have INSN_UIDs that are out of the range of the block.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in > 0 || (dont_jump_in == 0 && stack_level)
		  || cleanup_list)
	      && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error ("%Jlabel '%D' used before containing binding contour",
		     f->target, f->target);
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  lang_hooks.decls.pushlevel (0);
	  lang_hooks.decls.set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level
	      && ! (f->target_rtl == return_label
		    && ((TREE_CODE (TREE_TYPE (current_function_decl))
			 == FUNCTION_TYPE)
			&& (TYPE_RETURNS_STACK_DEPRESSED
			    (TREE_TYPE (current_function_decl))))))
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point ensures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  lang_hooks.decls.poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insn_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      lang_hooks.decls.pushlevel (0);
	      lang_hooks.decls.set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      lang_hooks.decls.poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insn_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

void
expand_asm (tree string, int vol)
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);
}
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */
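/* For example (illustrative only): given the constraint "=r",
   *ALLOWS_REG ends up true and *ALLOWS_MEM false; given "+m",
   *IS_INOUT and *ALLOWS_MEM end up true and the constraint is
   canonicalized to "=m"; and "r=" is rewritten so that the `='
   comes first, with a warning.  */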
bool
parse_output_constraint (const char **constraint_p, int operand_num,
			 int ninputs, int noutputs, bool *allows_mem,
			 bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks `='");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning ("output constraint `%c' for operand %d is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains incorrectly positioned '+' or '='");
	return false;

      case '%':
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}
/* Similar, but for input constraints.  */

bool
parse_input_constraint (const char **constraint_p, int input_num,
			int ninputs, int noutputs, int ninout,
			const char * const * constraints,
			bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;
  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains `%c'", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  saw_match = true;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      /* ??? At the end of the loop, we will skip the first part of
		 the matched constraint.  This assumes not only that the
		 other constraint is an output constraint, but also that
		 the '=' or '+' come first.  */
	      break;
	    }
	  else
	    j = end - constraint;
	  /* Anticipate increment at end of loop.  */
	  j--;
	}
	/* Fall through.  */

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation `%c' in constraint", constraint[j]);
	    return false;
	  }
	if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
	    != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  if (saw_match && !*allows_reg)
    warning ("matching constraint does not allow a register");

  return true;
}
/* INPUT is one of the input operands from EXPR, an ASM_EXPR.  Returns true
   if it is an operand which must be passed in memory (i.e. an "m"
   constraint), false otherwise.  */
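/* For example (illustrative only): in

     asm ("" : "=r" (out) : "m" (in));

   the input IN must live in memory, so this predicate would return
   true for it, while an "r"-constrained input would yield false.  */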
bool
asm_op_is_mem_input (tree input, tree expr)
{
  const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (input)));
  tree outputs = ASM_OUTPUTS (expr);
  int noutputs = list_length (outputs);
  const char **constraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i = 0;
  bool allows_mem, allows_reg;
  tree t;

  /* Collect output constraints.  */
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

  /* We pass 0 for input_num, ninputs and ninout; they are only used for
     error checking which will be done at expand time.  */
  parse_input_constraint (&constraint, 0, 0, noutputs, 0, constraints,
			  &allows_mem, &allows_reg);
  return (!allows_reg && allows_mem);
}
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in DECL.  Emit error and return TRUE for error,
   FALSE for ok.  */

static bool
decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
      && DECL_REGISTER (decl)
      && REG_P (DECL_RTL (decl))
      && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
    {
      rtx reg = DECL_RTL (decl);
      unsigned int regno;

      for (regno = REGNO (reg);
	   regno < (REGNO (reg)
		    + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
	   regno++)
	if (TEST_HARD_REG_BIT (clobbered_regs, regno))
	  {
	    error ("asm-specifier for variable `%s' conflicts with asm clobber list",
		   IDENTIFIER_POINTER (DECL_NAME (decl)));

	    /* Reset registerness to stop multiple errors emitted for a
	       single variable.  */
	    DECL_REGISTER (decl) = 0;
	    return true;
	  }
    }

  return false;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
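/* An illustrative, x86-flavored example only: for

     asm volatile ("rep; stosb"
		   : "=D" (p)
		   : "0" (buf), "a" (0), "c" (n)
		   : "memory");

   OUTPUTS has one entry (constraint "=D"), INPUTS has three (the
   first a matching constraint "0" referring back to output 0),
   CLOBBERS names "memory", and VOL is nonzero.  */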
void
expand_asm_operands (tree string, tree outputs, tree inputs,
		     tree clobbers, int vol, location_t locus)
{
  rtvec argvec, constraintvec;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout;
  int nclobbers;
  HARD_REG_SET clobbered_regs;
  int clobber_conflict_found = 0;
  tree tail;
  tree t;
  int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = alloca (noutputs * sizeof (rtx));
  int *inout_opnum = alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = alloca (noutputs * sizeof (enum machine_mode));
  const char **constraints
    = alloca ((noutputs + ninputs) * sizeof (const char *));
  int old_generating_concat_p = generating_concat_p;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (! check_operand_nalternatives (outputs, inputs))
    return;

  string = resolve_asm_operand_names (string, outputs, inputs);

  /* Collect constraints.  */
  i = 0;
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  clobbers = targetm.md_asm_clobbers (clobbers);

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  CLEAR_HARD_REG_SET (clobbered_regs);
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);

      /* Mark clobbered registers.  */
      if (i >= 0)
	{
	  /* Clobbering the PIC register is an error.  */
	  if (i == (int) PIC_OFFSET_TABLE_REGNUM)
	    {
	      error ("PIC register `%s' clobbered in `asm'", regname);
	      return;
	    }

	  SET_HARD_REG_BIT (clobbered_regs, i);
	}
    }

  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* If there's an erroneous arg, emit no insn.  */
      if (type == error_mark_node)
	return;

      /* Try to parse the output constraint.  If that fails, there's
	 no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
				    &allows_mem, &allows_reg, &is_inout))
	return;

      if (! allows_reg
	  && (allows_mem
	      || is_inout
	      || (DECL_P (val)
		  && REG_P (DECL_RTL (val))
		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
	lang_hooks.mark_addressable (val);

      if (is_inout)
	ninout++;
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
	 would get VOIDmode and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				    constraints, &allows_mem, &allows_reg))
	return;

      if (! allows_reg && allows_mem)
	lang_hooks.mark_addressable (TREE_VALUE (tail));
    }
  /* Second pass evaluates arguments.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      bool is_inout;
      bool allows_reg;
      bool allows_mem;
      rtx op;

      if (!parse_output_constraint (&constraints[i], i, ninputs,
				    noutputs, &allows_mem, &allows_reg,
				    &is_inout))
	abort ();

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (DECL_P (val)
	      && (allows_mem || REG_P (DECL_RTL (val)))
	      && ! (REG_P (DECL_RTL (val))
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
	  if (MEM_P (op))
	    op = validize_mem (op);

	  if (! allows_reg && !MEM_P (op))
	    error ("output number %d not directly addressable", i);
	  if ((! allows_mem && MEM_P (op))
	      || GET_CODE (op) == CONCAT)
	    {
	      real_output_rtx[i] = protect_from_queue (op, 1);
	      op = gen_reg_rtx (GET_MODE (op));
	      if (is_inout)
		emit_move_insn (op, real_output_rtx[i]);
	    }
	}
      else
	{
	  op = assign_temp (type, 0, 0, 1);
	  op = validize_mem (op);
	  TREE_VALUE (tail) = make_tree (type, op);
	}
      output_rtx[i] = op;

      generating_concat_p = old_generating_concat_p;

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (type);
	  inout_opnum[ninout++] = i;
	}

      if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
	clobber_conflict_found = 1;
    }

  /* Make vectors for the expression-rtx, constraint strings,
     and named operands.  */

  argvec = rtvec_alloc (ninputs);
  constraintvec = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
				: GET_MODE (output_rtx[0])),
			       TREE_STRING_POINTER (string),
			       empty_string, 0, argvec, constraintvec,
			       locus);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
    {
      bool allows_reg, allows_mem;
      const char *constraint;
      tree val, type;
      rtx op;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				    constraints, &allows_mem, &allows_reg))
	abort ();

      generating_concat_p = 0;

      val = TREE_VALUE (tail);
      type = TREE_TYPE (val);
      op = expand_expr (val, NULL_RTX, VOIDmode,
			(allows_mem && !allows_reg
			 ? EXPAND_MEMORY : EXPAND_NORMAL));

      /* Never pass a CONCAT to an ASM.  */
      if (GET_CODE (op) == CONCAT)
	op = force_reg (GET_MODE (op), op);
      else if (MEM_P (op))
	op = validize_mem (op);

      if (asm_operand_ok (op, constraint) <= 0)
	{
	  if (allows_reg)
	    op = force_reg (TYPE_MODE (type), op);
	  else if (!allows_mem)
	    warning ("asm operand %d probably doesn't match constraints",
		     i + noutputs);
	  else if (MEM_P (op))
	    {
	      /* We won't recognize either volatile memory or memory
		 with a queued address as an available memory_operand
		 at this point.  Ignore it: clearly this *is* a memory.  */
	    }
	  else
	    {
	      warning ("use of memory input without lvalue in "
		       "asm operand %d is deprecated", i + noutputs);

	      if (CONSTANT_P (op))
		{
		  rtx mem = force_const_mem (TYPE_MODE (type), op);
		  if (mem)
		    op = validize_mem (mem);
		  else
		    op = force_reg (TYPE_MODE (type), op);
		}
	      if (REG_P (op)
		  || GET_CODE (op) == SUBREG
		  || GET_CODE (op) == ADDRESSOF
		  || GET_CODE (op) == CONCAT)
		{
		  tree qual_type = build_qualified_type (type,
							 (TYPE_QUALS (type)
							  | TYPE_QUAL_CONST));
		  rtx memloc = assign_temp (qual_type, 1, 1, 1);
		  memloc = validize_mem (memloc);
		  emit_move_insn (memloc, op);
		  op = memloc;
		}
	    }
	}

      generating_concat_p = old_generating_concat_p;
      ASM_OPERANDS_INPUT (body, i) = op;

      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
	= gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);

      if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
	clobber_conflict_found = 1;
    }
  /* Protect all the operands from the queue now that they have all been
     evaluated.  */

  generating_concat_p = 0;

  for (i = 0; i < ninputs - ninout; i++)
    ASM_OPERANDS_INPUT (body, i)
      = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      int j = inout_opnum[i];
      char buffer[16];

      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
	= output_rtx[j];

      sprintf (buffer, "%d", j);
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
	= gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
    }

  generating_concat_p = old_generating_concat_p;

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
		  ARGVEC CONSTRAINTS OPNAMES))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
      emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }

  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      emit_insn (body);
    }

  else
    {
      rtx obody = body;
      int num = noutputs;

      if (num == 0)
	num = 1;

      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */
      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx_SET (VOIDmode,
			   output_rtx[i],
			   gen_rtx_ASM_OPERANDS
			   (GET_MODE (output_rtx[i]),
			    TREE_STRING_POINTER (string),
			    constraints[i], i, argvec, constraintvec,
			    locus));

	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);
	  rtx clobbered_reg;

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx_CLOBBER (VOIDmode,
				       gen_rtx_MEM
				       (BLKmode,
					gen_rtx_SCRATCH (VOIDmode)));
		  continue;
		}

	      /* Ignore unknown register, error already signaled.  */
	      continue;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  clobbered_reg = gen_rtx_REG (QImode, j);

	  /* Do sanity check for overlap between clobbers and respectively
	     input and outputs that hasn't been handled.  Such overlap
	     should have been detected and reported above.  */
	  if (!clobber_conflict_found)
	    {
	      int opno;

	      /* We test the old body (obody) contents to avoid tripping
		 over the under-construction body.  */
	      for (opno = 0; opno < noutputs; opno++)
		if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
		  internal_error ("asm clobber conflict with output operand");

	      for (opno = 0; opno < ninputs - ninout; opno++)
		if (reg_overlap_mentioned_p (clobbered_reg,
					     ASM_OPERANDS_INPUT (obody, opno)))
		  internal_error ("asm clobber conflict with input operand");
	    }

	  XVECEXP (body, 0, i++)
	    = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
	}

      emit_insn (body);
    }

  /* For any outputs that needed reloading into registers, spill them
     back to where they belong.  */
  for (i = 0; i < noutputs; ++i)
    if (real_output_rtx[i])
      emit_move_insn (real_output_rtx[i], output_rtx[i]);

  free_temp_slots ();
}
1768 void
1769 expand_asm_expr (tree exp)
1771 int noutputs, i;
1772 tree outputs, tail;
1773 tree *o;
1775 if (ASM_INPUT_P (exp))
1777 expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
1778 return;
1781 outputs = ASM_OUTPUTS (exp);
1782 noutputs = list_length (outputs);
1783 /* o[I] is the place that output number I should be written. */
1784 o = (tree *) alloca (noutputs * sizeof (tree));
1786 /* Record the contents of OUTPUTS before it is modified. */
1787 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1788 o[i] = TREE_VALUE (tail);
1790 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1791 OUTPUTS some trees for where the values were actually stored. */
1792 expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1793 ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1794 input_location);
1796 /* Copy all the intermediate outputs into the specified outputs. */
1797 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1799 if (o[i] != TREE_VALUE (tail))
1801 expand_assignment (o[i], TREE_VALUE (tail), 0);
1802 free_temp_slots ();
1804 /* Restore the original value so that it's correct the next
1805 time we expand this function. */
1806 TREE_VALUE (tail) = o[i];
1810 /* Those MODIFY_EXPRs could do autoincrements. */
1811 emit_queue ();
1814 /* A subroutine of expand_asm_operands. Check that all operands have
1815 the same number of alternatives. Return true if so. */
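/* E.g. the constraints "=r,m" and "r,m" each have two comma-separated
   alternatives, so they agree; mixing in an operand whose constraint
   is plain "r" would be rejected. */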
1817 static bool
1818 check_operand_nalternatives (tree outputs, tree inputs)
1820 if (outputs || inputs)
1822 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1823 int nalternatives
1824 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1825 tree next = inputs;
1827 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1829 error ("too many alternatives in `asm'");
1830 return false;
1833 tmp = outputs;
1834 while (tmp)
1836 const char *constraint
1837 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1839 if (n_occurrences (',', constraint) != nalternatives)
1841 error ("operand constraints for `asm' differ in number of alternatives");
1842 return false;
1845 if (TREE_CHAIN (tmp))
1846 tmp = TREE_CHAIN (tmp);
1847 else
1848 tmp = next, next = 0;
1852 return true;
1855 /* A subroutine of expand_asm_operands. Check that all operand names
1856 are unique. Return true if so. We rely on the fact that these names
1857 are identifiers, and so have been canonicalized by get_identifier,
1858 so all we need are pointer comparisons. */
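/* E.g. an asm whose outputs are [x] "=r" (a) and [x] "=r" (b) is
   rejected here, since both operands are named "x". */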
1860 static bool
1861 check_unique_operand_names (tree outputs, tree inputs)
1863 tree i, j;
1865 for (i = outputs; i ; i = TREE_CHAIN (i))
1867 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1868 if (! i_name)
1869 continue;
1871 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1872 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1873 goto failure;
1876 for (i = inputs; i ; i = TREE_CHAIN (i))
1878 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1879 if (! i_name)
1880 continue;
1882 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1883 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1884 goto failure;
1885 for (j = outputs; j ; j = TREE_CHAIN (j))
1886 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1887 goto failure;
1890 return true;
1892 failure:
1893 error ("duplicate asm operand name '%s'",
1894 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1895 return false;
1898 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1899 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1900 STRING and in the constraints to those numbers. */
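/* E.g. given
     asm ("inc %[cnt]" : [cnt] "+r" (n));
   the "%[cnt]" in the template becomes "%0", and a matching
   constraint such as "[cnt]" on an input operand would likewise
   become "0". */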
1902 tree
1903 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1905 char *buffer;
1906 char *p;
1907 const char *c;
1908 tree t;
1910 check_unique_operand_names (outputs, inputs);
1912 /* Substitute [<name>] in input constraint strings. There should be no
1913 named operands in output constraints. */
1914 for (t = inputs; t ; t = TREE_CHAIN (t))
1916 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1917 if (strchr (c, '[') != NULL)
1919 p = buffer = xstrdup (c);
1920 while ((p = strchr (p, '[')) != NULL)
1921 p = resolve_operand_name_1 (p, outputs, inputs);
1922 TREE_VALUE (TREE_PURPOSE (t))
1923 = build_string (strlen (buffer), buffer);
1924 free (buffer);
1928 /* Now check for any needed substitutions in the template. */
1929 c = TREE_STRING_POINTER (string);
1930 while ((c = strchr (c, '%')) != NULL)
1932 if (c[1] == '[')
1933 break;
1934 else if (ISALPHA (c[1]) && c[2] == '[')
1935 break;
1936 else
1938 c += 1;
1939 continue;
1943 if (c)
1945 /* OK, we need to make a copy so we can perform the substitutions.
1946 Assume that we will not need extra space--we get to remove '['
1947 and ']', which means we cannot have a problem until we have more
1948 than 999 operands. */
1949 buffer = xstrdup (TREE_STRING_POINTER (string));
1950 p = buffer + (c - TREE_STRING_POINTER (string));
1952 while ((p = strchr (p, '%')) != NULL)
1954 if (p[1] == '[')
1955 p += 1;
1956 else if (ISALPHA (p[1]) && p[2] == '[')
1957 p += 2;
1958 else
1960 p += 1;
1961 continue;
1964 p = resolve_operand_name_1 (p, outputs, inputs);
1967 string = build_string (strlen (buffer), buffer);
1968 free (buffer);
1971 return string;
1974 /* A subroutine of resolve_operand_names. P points to the '[' for a
1975 potential named operand of the form [<name>]. In place, replace
1976 the name and brackets with a number. Return a pointer to the
1977 balance of the string after substitution. */
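/* E.g. if operand 2 is named "len", then on entry P points at the
   "[" of "[len]..." and on exit the buffer reads "2..." with the
   returned pointer just past the "2". */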
1979 static char *
1980 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
1982 char *q;
1983 int op;
1984 tree t;
1985 size_t len;
1987 /* Collect the operand name. */
1988 q = strchr (p, ']');
1989 if (!q)
1991 error ("missing close brace for named operand");
1992 return strchr (p, '\0');
1994 len = q - p - 1;
1996 /* Resolve the name to a number. */
1997 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1999 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2000 if (name)
2002 const char *c = TREE_STRING_POINTER (name);
2003 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2004 goto found;
2007 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2009 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2010 if (name)
2012 const char *c = TREE_STRING_POINTER (name);
2013 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2014 goto found;
2018 *q = '\0';
2019 error ("undefined named operand '%s'", p + 1);
2020 op = 0;
2021 found:
2023 /* Replace the name with the number. Unfortunately, not all libraries
2024 get the return value of sprintf correct, so search for the end of the
2025 generated string by hand. */
2026 sprintf (p, "%d", op);
2027 p = strchr (p, '\0');
2029 /* Verify the assumption that no extra buffer space was needed. */
2030 if (p > q)
2031 abort ();
2033 /* Shift the rest of the buffer down to fill the gap. */
2034 memmove (p, q + 1, strlen (q + 1) + 1);
2036 return p;
2039 /* Generate RTL to evaluate the expression EXP. */
2041 void
2042 expand_expr_stmt (tree exp)
2044 rtx value;
2045 tree type;
2047 value = expand_expr (exp, const0_rtx, VOIDmode, 0);
2048 type = TREE_TYPE (exp);
2050 /* If all we do is reference a volatile value in memory,
2051 copy it to a register to be sure it is actually touched. */
2052 if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
2054 if (TYPE_MODE (type) == VOIDmode)
2055 ;
2056 else if (TYPE_MODE (type) != BLKmode)
2057 value = copy_to_reg (value);
2058 else
2060 rtx lab = gen_label_rtx ();
2062 /* Compare the value with itself to reference it. */
2063 emit_cmp_and_jump_insns (value, value, EQ,
2064 expand_expr (TYPE_SIZE (type),
2065 NULL_RTX, VOIDmode, 0),
2066 BLKmode, 0, lab);
2067 emit_label (lab);
2071 /* Free any temporaries used to evaluate this expression. */
2072 free_temp_slots ();
2074 emit_queue ();
2077 /* Warn if EXP contains any computations whose results are not used.
2078 Return 1 if a warning is printed; 0 otherwise. LOCUS is the
2079 (potential) location of the expression. */
2081 int
2082 warn_if_unused_value (tree exp, location_t locus)
2084 restart:
2085 if (TREE_USED (exp))
2086 return 0;
2088 /* Don't warn about void constructs. This includes casting to void,
2089 void function calls, and statement expressions with a final cast
2090 to void. */
2091 if (VOID_TYPE_P (TREE_TYPE (exp)))
2092 return 0;
2094 if (EXPR_LOCUS (exp))
2095 locus = *EXPR_LOCUS (exp);
2097 switch (TREE_CODE (exp))
2099 case PREINCREMENT_EXPR:
2100 case POSTINCREMENT_EXPR:
2101 case PREDECREMENT_EXPR:
2102 case POSTDECREMENT_EXPR:
2103 case MODIFY_EXPR:
2104 case INIT_EXPR:
2105 case TARGET_EXPR:
2106 case CALL_EXPR:
2107 case TRY_CATCH_EXPR:
2108 case WITH_CLEANUP_EXPR:
2109 case EXIT_EXPR:
2110 return 0;
2112 case BIND_EXPR:
2113 /* For a binding, warn if no side effect within it. */
2114 exp = BIND_EXPR_BODY (exp);
2115 goto restart;
2117 case SAVE_EXPR:
2118 exp = TREE_OPERAND (exp, 0);
2119 goto restart;
2121 case TRUTH_ORIF_EXPR:
2122 case TRUTH_ANDIF_EXPR:
2123 /* In && or ||, warn if 2nd operand has no side effect. */
2124 exp = TREE_OPERAND (exp, 1);
2125 goto restart;
2127 case COMPOUND_EXPR:
2128 if (TREE_NO_WARNING (exp))
2129 return 0;
2130 if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
2131 return 1;
2132 /* Let people do `(foo (), 0)' without a warning. */
2133 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2134 return 0;
2135 exp = TREE_OPERAND (exp, 1);
2136 goto restart;
2138 case NOP_EXPR:
2139 case CONVERT_EXPR:
2140 case NON_LVALUE_EXPR:
2141 /* Don't warn about conversions not explicit in the user's program. */
2142 if (TREE_NO_WARNING (exp))
2143 return 0;
2144 /* Assignment to a cast usually results in a cast of a modify.
2145 Don't complain about that. There can be an arbitrary number of
2146 casts before the modify, so we must loop until we find the first
2147 non-cast expression and then test to see if that is a modify. */
2149 tree tem = TREE_OPERAND (exp, 0);
2151 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2152 tem = TREE_OPERAND (tem, 0);
2154 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2155 || TREE_CODE (tem) == CALL_EXPR)
2156 return 0;
2158 goto maybe_warn;
2160 case INDIRECT_REF:
2161 /* Don't warn about automatic dereferencing of references, since
2162 the user cannot control it. */
2163 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2165 exp = TREE_OPERAND (exp, 0);
2166 goto restart;
2168 /* Fall through. */
2170 default:
2171 /* Referencing a volatile value is a side effect, so don't warn. */
2172 if ((DECL_P (exp)
2173 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2174 && TREE_THIS_VOLATILE (exp))
2175 return 0;
2177 /* If this is an expression which has no operands, there is no value
2178 to be unused. There are no such language-independent codes,
2179 but front ends may define such codes. */
2180 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2181 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2182 return 0;
2184 maybe_warn:
2185 /* If this is an expression with side effects, don't warn. */
2186 if (TREE_SIDE_EFFECTS (exp))
2187 return 0;
2189 warning ("%Hvalue computed is not used", &locus);
2190 return 1;
2194 /* Generate RTL for the start of an if-then. COND is the expression
2195 whose truth should be tested.
2197 If EXITFLAG is nonzero, this conditional is visible to
2198 `exit_something'. */
2200 void
2201 expand_start_cond (tree cond, int exitflag)
2203 struct nesting *thiscond = ALLOC_NESTING ();
2205 /* Make an entry on cond_stack for the cond we are entering. */
2207 thiscond->desc = COND_NESTING;
2208 thiscond->next = cond_stack;
2209 thiscond->all = nesting_stack;
2210 thiscond->depth = ++nesting_depth;
2211 thiscond->data.cond.next_label = gen_label_rtx ();
2212 /* Before we encounter an `else', we don't need a separate exit label
2213 unless there are supposed to be exit statements
2214 to exit this conditional. */
2215 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2216 thiscond->data.cond.endif_label = thiscond->exit_label;
2217 cond_stack = thiscond;
2218 nesting_stack = thiscond;
2220 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2223 /* Generate RTL between the then-clause and the elseif-clause
2224 of an if-then-elseif-.... */
2226 void
2227 expand_start_elseif (tree cond)
2229 if (cond_stack->data.cond.endif_label == 0)
2230 cond_stack->data.cond.endif_label = gen_label_rtx ();
2231 emit_jump (cond_stack->data.cond.endif_label);
2232 emit_label (cond_stack->data.cond.next_label);
2233 cond_stack->data.cond.next_label = gen_label_rtx ();
2234 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2237 /* Generate RTL between the then-clause and the else-clause
2238 of an if-then-else. */
2240 void
2241 expand_start_else (void)
2243 if (cond_stack->data.cond.endif_label == 0)
2244 cond_stack->data.cond.endif_label = gen_label_rtx ();
2246 emit_jump (cond_stack->data.cond.endif_label);
2247 emit_label (cond_stack->data.cond.next_label);
2248 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2251 /* After calling expand_start_else, turn this "else" into an "else if"
2252 by providing another condition. */
2254 void
2255 expand_elseif (tree cond)
2257 cond_stack->data.cond.next_label = gen_label_rtx ();
2258 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2261 /* Generate RTL for the end of an if-then.
2262 Pop the record for it off of cond_stack. */
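/* Taken together, a front end might expand
     if (a) S1; else if (b) S2; else S3;
   with a call sequence along these lines (illustrative sketch):
     expand_start_cond (a, 0);   ...emit S1...
     expand_start_elseif (b);    ...emit S2...
     expand_start_else ();       ...emit S3...
     expand_end_cond ();  */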
2264 void
2265 expand_end_cond (void)
2267 struct nesting *thiscond = cond_stack;
2269 do_pending_stack_adjust ();
2270 if (thiscond->data.cond.next_label)
2271 emit_label (thiscond->data.cond.next_label);
2272 if (thiscond->data.cond.endif_label)
2273 emit_label (thiscond->data.cond.endif_label);
2275 POPSTACK (cond_stack);
2278 /* Return nonzero if we should preserve sub-expressions as separate
2279 pseudos. We never do so if we aren't optimizing. We always do so
2280 if -fexpensive-optimizations. */
2282 int
2283 preserve_subexpressions_p (void)
2285 if (flag_expensive_optimizations)
2286 return 1;
2288 if (optimize == 0 || cfun == 0 || cfun->stmt == 0)
2289 return 0;
2291 return 1;
2295 /* Generate RTL to return from the current function, with no value.
2296 (That is, we do not do anything about returning any value.) */
2298 void
2299 expand_null_return (void)
2301 rtx last_insn;
2303 last_insn = get_last_insn ();
2305 /* If this function was declared to return a value, but we
2306 didn't, clobber the return registers so that they are not
2307 propagated live to the rest of the function. */
2308 clobber_return_register ();
2310 expand_null_return_1 (last_insn);
2313 /* Generate RTL to return directly from the current function.
2314 (That is, we bypass any return value.) */
2316 void
2317 expand_naked_return (void)
2319 rtx last_insn, end_label;
2321 last_insn = get_last_insn ();
2322 end_label = naked_return_label;
2324 clear_pending_stack_adjust ();
2325 do_pending_stack_adjust ();
2327 if (end_label == 0)
2328 end_label = naked_return_label = gen_label_rtx ();
2329 expand_goto_internal (NULL_TREE, end_label, last_insn);
2332 /* Try to guess whether the return value indicates an error code. */
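/* E.g. "return NULL;" in a pointer-valued function is predicted as
   PRED_NULL_RETURN, "return -1;" as PRED_NEGATIVE_RETURN, and
   "return 42;" as PRED_CONST_RETURN, while a non-constant
   "return x;" yields PRED_NO_PREDICTION. */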
2333 static enum br_predictor
2334 return_prediction (rtx val)
2336 /* Different heuristics for pointers and scalars. */
2337 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2339 /* NULL is usually not returned. */
2340 if (val == const0_rtx)
2341 return PRED_NULL_RETURN;
2343 else
2345 /* Negative return values are often used to indicate
2346 errors. */
2347 if (GET_CODE (val) == CONST_INT
2348 && INTVAL (val) < 0)
2349 return PRED_NEGATIVE_RETURN;
2350 /* Constant return values also usually indicate errors;
2351 zero/one often mean booleans, so exclude them from the
2352 heuristics. */
2353 if (CONSTANT_P (val)
2354 && (val != const0_rtx && val != const1_rtx))
2355 return PRED_CONST_RETURN;
2357 return PRED_NO_PREDICTION;
2361 /* If the current function returns values in the most significant part
2362 of a register, shift return value VAL appropriately. The mode of
2363 the function's return type is known not to be BLKmode. */
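/* E.g. a 2-byte value returned in the most significant part of a
   4-byte register is shifted left by 32 - 2*8 = 16 bits (a sketch
   assuming BITS_PER_UNIT == 8). */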
2365 static rtx
2366 shift_return_value (rtx val)
2368 tree type;
2370 type = TREE_TYPE (DECL_RESULT (current_function_decl));
2371 if (targetm.calls.return_in_msb (type))
2373 rtx target;
2374 HOST_WIDE_INT shift;
2376 target = DECL_RTL (DECL_RESULT (current_function_decl));
2377 shift = (GET_MODE_BITSIZE (GET_MODE (target))
2378 - BITS_PER_UNIT * int_size_in_bytes (type));
2379 if (shift > 0)
2380 val = expand_binop (GET_MODE (target), ashl_optab,
2381 gen_lowpart (GET_MODE (target), val),
2382 GEN_INT (shift), target, 1, OPTAB_WIDEN);
2384 return val;
2388 /* Generate RTL to return from the current function, with value VAL. */
2390 static void
2391 expand_value_return (rtx val)
2393 rtx last_insn;
2394 rtx return_reg;
2395 enum br_predictor pred;
2397 if (flag_guess_branch_prob
2398 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
2400 /* Emit information for branch prediction. */
2401 rtx note;
2403 note = emit_note (NOTE_INSN_PREDICTION);
2405 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2409 last_insn = get_last_insn ();
2410 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2412 /* Copy the value to the return location
2413 unless it's already there. */
2415 if (return_reg != val)
2417 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2418 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
2420 int unsignedp = TYPE_UNSIGNED (type);
2421 enum machine_mode old_mode
2422 = DECL_MODE (DECL_RESULT (current_function_decl));
2423 enum machine_mode mode
2424 = promote_mode (type, old_mode, &unsignedp, 1);
2426 if (mode != old_mode)
2427 val = convert_modes (mode, old_mode, val, unsignedp);
2429 if (GET_CODE (return_reg) == PARALLEL)
2430 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
2431 else
2432 emit_move_insn (return_reg, val);
2435 expand_null_return_1 (last_insn);
2438 /* Output a return with no value. If LAST_INSN is nonzero,
2439 pretend that the return takes place after LAST_INSN. */
2441 static void
2442 expand_null_return_1 (rtx last_insn)
2444 rtx end_label = return_label;
2446 clear_pending_stack_adjust ();
2447 do_pending_stack_adjust ();
2449 if (end_label == 0)
2450 end_label = return_label = gen_label_rtx ();
2451 expand_goto_internal (NULL_TREE, end_label, last_insn);
2454 /* Generate RTL to evaluate the expression RETVAL and return it
2455 from the current function. */
2457 void
2458 expand_return (tree retval)
2460 /* If there are any cleanups to be performed, then they will
2461 be inserted following LAST_INSN. It is desirable
2462 that the last_insn, for such purposes, should be the
2463 last insn before computing the return value. Otherwise, cleanups
2464 which call functions can clobber the return value. */
2465 /* ??? rms: I think that is erroneous, because in C++ it would
2466 run destructors on variables that might be used in the subsequent
2467 computation of the return value. */
2468 rtx last_insn = 0;
2469 rtx result_rtl;
2470 rtx val = 0;
2471 tree retval_rhs;
2473 /* If function wants no value, give it none. */
2474 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2476 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2477 emit_queue ();
2478 expand_null_return ();
2479 return;
2482 if (retval == error_mark_node)
2484 /* Treat this like a return of no value from a function that
2485 returns a value. */
2486 expand_null_return ();
2487 return;
2489 else if (TREE_CODE (retval) == RESULT_DECL)
2490 retval_rhs = retval;
2491 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2492 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2493 retval_rhs = TREE_OPERAND (retval, 1);
2494 else
2495 retval_rhs = retval;
2497 last_insn = get_last_insn ();
2499 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
2501 /* If the result is an aggregate that is being returned in one (or more)
2502 registers, load the registers here. The compiler currently can't handle
2503 copying a BLKmode value into registers. We could put this code in a
2504 more general area (for use by everyone instead of just function
2505 call/return), but until this feature is generally usable it is kept here
2506 (and in expand_call). The value must go into a pseudo in case there
2507 are cleanups that will clobber the real return register. */
2509 if (retval_rhs != 0
2510 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2511 && REG_P (result_rtl))
2513 int i;
2514 unsigned HOST_WIDE_INT bitpos, xbitpos;
2515 unsigned HOST_WIDE_INT padding_correction = 0;
2516 unsigned HOST_WIDE_INT bytes
2517 = int_size_in_bytes (TREE_TYPE (retval_rhs));
2518 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2519 unsigned int bitsize
2520 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
2521 rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
2522 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2523 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2524 enum machine_mode tmpmode, result_reg_mode;
2526 if (bytes == 0)
2528 expand_null_return ();
2529 return;
2532 /* If the structure doesn't take up a whole number of words, see
2533 whether the register value should be padded on the left or on
2534 the right. Set PADDING_CORRECTION to the number of padding
2535 bits needed on the left side.
2537 In most ABIs, the structure will be returned at the least significant end of
2538 the register, which translates to right padding on little-endian
2539 targets and left padding on big-endian targets. The opposite
2540 holds if the structure is returned at the most significant
2541 end of the register. */
2542 if (bytes % UNITS_PER_WORD != 0
2543 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
2544 ? !BYTES_BIG_ENDIAN
2545 : BYTES_BIG_ENDIAN))
2546 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2547 * BITS_PER_UNIT));
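/* E.g. with 4-byte words and a 6-byte structure that needs left
   padding, padding_correction = 32 - (6 % 4) * 8 = 16 bits
   (assuming BITS_PER_UNIT == 8). */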
2549 /* Copy the structure BITSIZE bits at a time. */
2550 for (bitpos = 0, xbitpos = padding_correction;
2551 bitpos < bytes * BITS_PER_UNIT;
2552 bitpos += bitsize, xbitpos += bitsize)
2554 /* We need a new destination pseudo each time xbitpos is
2555 on a word boundary and when xbitpos == padding_correction
2556 (the first time through). */
2557 if (xbitpos % BITS_PER_WORD == 0
2558 || xbitpos == padding_correction)
2560 /* Generate an appropriate register. */
2561 dst = gen_reg_rtx (word_mode);
2562 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2564 /* Clear the destination before we move anything into it. */
2565 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
2568 /* We need a new source operand each time bitpos is on a word
2569 boundary. */
2570 if (bitpos % BITS_PER_WORD == 0)
2571 src = operand_subword_force (result_val,
2572 bitpos / BITS_PER_WORD,
2573 BLKmode);
2575 /* Use bitpos for the source extraction (left justified) and
2576 xbitpos for the destination store (right justified). */
2577 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2578 extract_bit_field (src, bitsize,
2579 bitpos % BITS_PER_WORD, 1,
2580 NULL_RTX, word_mode, word_mode,
2581 BITS_PER_WORD),
2582 BITS_PER_WORD);
2585 tmpmode = GET_MODE (result_rtl);
2586 if (tmpmode == BLKmode)
2588 /* Find the smallest integer mode large enough to hold the
2589 entire structure and use that mode instead of BLKmode
2590 on the USE insn for the return register. */
2591 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2592 tmpmode != VOIDmode;
2593 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2594 /* Have we found a large enough mode? */
2595 if (GET_MODE_SIZE (tmpmode) >= bytes)
2596 break;
2598 /* No suitable mode found. */
2599 if (tmpmode == VOIDmode)
2600 abort ();
2602 PUT_MODE (result_rtl, tmpmode);
2605 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2606 result_reg_mode = word_mode;
2607 else
2608 result_reg_mode = tmpmode;
2609 result_reg = gen_reg_rtx (result_reg_mode);
2611 emit_queue ();
2612 for (i = 0; i < n_regs; i++)
2613 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2614 result_pseudos[i]);
2616 if (tmpmode != result_reg_mode)
2617 result_reg = gen_lowpart (tmpmode, result_reg);
2619 expand_value_return (result_reg);
2621 else if (retval_rhs != 0
2622 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
2623 && (REG_P (result_rtl)
2624 || (GET_CODE (result_rtl) == PARALLEL)))
2626 /* Calculate the return value into a temporary (usually a pseudo
2627 reg). */
2628 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
2629 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
2631 val = assign_temp (nt, 0, 0, 1);
2632 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2633 val = force_not_mem (val);
2634 emit_queue ();
2635 /* Return the calculated value, doing cleanups first. */
2636 expand_value_return (shift_return_value (val));
2638 else
2640 /* No cleanups or no hard reg used;
2641 calculate value into hard return reg. */
2642 expand_expr (retval, const0_rtx, VOIDmode, 0);
2643 emit_queue ();
2644 expand_value_return (result_rtl);
2648 /* Generate the RTL code for entering a binding contour.
2649 The variables are declared one by one, by calls to `expand_decl'.
2651 FLAGS is a bitwise or of the following flags:
2653 1 - Nonzero if this construct should be visible to
2654 `exit_something'.
2656 2 - Nonzero if this contour does not require a
2657 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
2658 language-independent code should set this flag because they
2659 will not create corresponding BLOCK nodes. (There should be
2660 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
2661 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
2662 when expand_end_bindings is called.
2664 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
2665 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
2666 note. */
2668 void
2669 expand_start_bindings_and_block (int flags, tree block)
2671 struct nesting *thisblock = ALLOC_NESTING ();
2672 rtx note;
2673 int exit_flag = ((flags & 1) != 0);
2674 int block_flag = ((flags & 2) == 0);
2676 /* If a BLOCK is supplied, then the caller should be requesting a
2677 NOTE_INSN_BLOCK_BEG note. */
2678 if (!block_flag && block)
2679 abort ();
2681 /* Create a note to mark the beginning of the block. */
2682 note = emit_note (NOTE_INSN_DELETED);
2684 /* Make an entry on block_stack for the block we are entering. */
2686 thisblock->desc = BLOCK_NESTING;
2687 thisblock->next = block_stack;
2688 thisblock->all = nesting_stack;
2689 thisblock->depth = ++nesting_depth;
2690 thisblock->data.block.stack_level = 0;
2691 thisblock->data.block.cleanups = 0;
2692 thisblock->data.block.exception_region = 0;
2693 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
2695 thisblock->data.block.conditional_code = 0;
2696 thisblock->data.block.last_unconditional_cleanup = note;
2697 /* When we insert instructions after the last unconditional cleanup,
2698 we don't adjust last_insn. That means that a later add_insn will
2699 clobber the instructions we've just added. The easiest way to
2700 fix this is to just insert another instruction here, so that the
2701 instructions inserted after the last unconditional cleanup are
2702 never the last instruction. */
2703 emit_note (NOTE_INSN_DELETED);
2705 if (block_stack
2706 && !(block_stack->data.block.cleanups == NULL_TREE
2707 && block_stack->data.block.outer_cleanups == NULL_TREE))
2708 thisblock->data.block.outer_cleanups
2709 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2710 block_stack->data.block.outer_cleanups);
2711 else
2712 thisblock->data.block.outer_cleanups = 0;
2713 thisblock->data.block.label_chain = 0;
2714 thisblock->data.block.innermost_stack_block = stack_block_stack;
2715 thisblock->data.block.first_insn = note;
2716 thisblock->data.block.block_start_count = ++current_block_start_count;
2717 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2718 block_stack = thisblock;
2719 nesting_stack = thisblock;
2721 /* Make a new level for allocating stack slots. */
2722 push_temp_slots ();
2725 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2726 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
2727 expand_expr are made. After we end the region, we know that all
2728 space for all temporaries that were created by TARGET_EXPRs will be
2729 destroyed and their space freed for reuse. */
2731 void
2732 expand_start_target_temps (void)
2734 /* This is so that even if the result is preserved, the space
2735 allocated will be freed, as we know that it is no longer in use. */
2736 push_temp_slots ();
2738 /* Start a new binding layer that will keep track of all cleanup
2739 actions to be performed. */
2740 expand_start_bindings (2);
2742 target_temp_slot_level = temp_slot_level;
2745 void
2746 expand_end_target_temps (void)
2748 expand_end_bindings (NULL_TREE, 0, 0);
2750 /* This is so that even if the result is preserved, the space
2751 allocated will be freed, as we know that it is no longer in use. */
2752 pop_temp_slots ();
2755 /* Given a pointer to a BLOCK node, return nonzero if (and only if) the node
2756 in question represents the outermost pair of curly braces (i.e. the "body
2757 block") of a function or method.
2759 For any BLOCK node representing a "body block" of a function or method, the
2760 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
2761 represents the outermost (function) scope for the function or method (i.e.
2762 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
2763 *that* node in turn will point to the relevant FUNCTION_DECL node. */
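/* Schematically:
     FUNCTION_DECL
       BLOCK      <- outermost (function) scope, with the parameters
         BLOCK    <- the "body block" this predicate recognizes  */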
2765 int
2766 is_body_block (tree stmt)
2768 if (lang_hooks.no_body_blocks)
2769 return 0;
2771 if (TREE_CODE (stmt) == BLOCK)
2773 tree parent = BLOCK_SUPERCONTEXT (stmt);
2775 if (parent && TREE_CODE (parent) == BLOCK)
2777 tree grandparent = BLOCK_SUPERCONTEXT (parent);
2779 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
2780 return 1;
2784 return 0;
2787 /* True if we are currently emitting insns in an area of output code
2788 that is controlled by a conditional expression. This is used by
2789 the cleanup handling code to generate conditional cleanup actions. */
2791 int
2792 conditional_context (void)
2794 return block_stack && block_stack->data.block.conditional_code;
2797 /* Return an opaque pointer to the current nesting level, so frontend code
2798 can check its own sanity. */
2800 struct nesting *
2801 current_nesting_level (void)
2803 return cfun ? block_stack : 0;
2806 /* Emit code to restore vital registers at the beginning of a nonlocal goto
2807 handler. */
2808 static void
2809 expand_nl_goto_receiver (void)
2811 /* Clobber the FP when we get here, so we have to make sure it's
2812 marked as used by this function. */
2813 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
2815 /* Mark the static chain as clobbered here so life information
2816 doesn't get messed up for it. */
2817 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
2819 #ifdef HAVE_nonlocal_goto
2820 if (! HAVE_nonlocal_goto)
2821 #endif
2822 /* First adjust our frame pointer to its actual value. It was
2823 previously set to the start of the virtual area corresponding to
2824 the stacked variables when we branched here and now needs to be
2825 adjusted to the actual hardware fp value.
2827 Assignments to virtual registers are converted by
2828 instantiate_virtual_regs into the corresponding assignment
2829 to the underlying register (fp in this case) that makes
2830 the original assignment true.
2831 So the following insn will actually be
2832 decrementing fp by STARTING_FRAME_OFFSET. */
2833 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
2835 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2836 if (fixed_regs[ARG_POINTER_REGNUM])
2838 #ifdef ELIMINABLE_REGS
2839 /* If the argument pointer can be eliminated in favor of the
2840 frame pointer, we don't need to restore it. We assume here
2841 that if such an elimination is present, it can always be used.
2842 This is the case on all known machines; if we don't make this
2843 assumption, we do unnecessary saving on many machines. */
2844 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
2845 size_t i;
2847 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
2848 if (elim_regs[i].from == ARG_POINTER_REGNUM
2849 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
2850 break;
2852 if (i == ARRAY_SIZE (elim_regs))
2853 #endif
2855 /* Now restore our arg pointer from the address at which it
2856 was saved in our stack frame. */
2857 emit_move_insn (virtual_incoming_args_rtx,
2858 copy_to_reg (get_arg_pointer_save_area (cfun)));
2861 #endif
2863 #ifdef HAVE_nonlocal_goto_receiver
2864 if (HAVE_nonlocal_goto_receiver)
2865 emit_insn (gen_nonlocal_goto_receiver ());
2866 #endif
2868 /* @@@ This is a kludge. Not all machine descriptions define a blockage
2869 insn, but we must not allow the code we just generated to be reordered
2870 by scheduling. Specifically, the update of the frame pointer must
2871 happen immediately, not later. So emit an ASM_INPUT to act as a
2872 blockage insn. */
2873 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
2876 /* Warn about any unused VARS (which may contain nodes other than
2877 VAR_DECLs, but such nodes are ignored). The nodes are connected
2878 via the TREE_CHAIN field. */
2880 void
2881 warn_about_unused_variables (tree vars)
2883 tree decl;
2885 if (warn_unused_variable)
2886 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2887 if (TREE_CODE (decl) == VAR_DECL
2888 && ! TREE_USED (decl)
2889 && ! DECL_IN_SYSTEM_HEADER (decl)
2890 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
2891 warning ("%Junused variable '%D'", decl, decl);
2894 /* Generate RTL code to terminate a binding contour.
2896 VARS is the chain of VAR_DECL nodes for the variables bound in this
2897 contour. There may actually be other nodes in this chain, but any
2898 nodes other than VAR_DECLS are ignored.
2900 MARK_ENDS is nonzero if we should put a note at the beginning
2901 and end of this binding contour.
2903 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
2904 zero if we can jump into this contour only if it does not have a saved
2905 stack level, and negative if we are not to check for invalid use of
2906 labels (because the front end does that). */
2908 void
2909 expand_end_bindings (tree vars, int mark_ends ATTRIBUTE_UNUSED,
2910 int dont_jump_in)
2912 struct nesting *thisblock = block_stack;
2914 /* If any of the variables in this scope were not used, warn the
2915 user. */
2916 warn_about_unused_variables (vars);
2918 if (thisblock->exit_label)
2920 do_pending_stack_adjust ();
2921 emit_label (thisblock->exit_label);
2924 /* Don't allow jumping into a block that has a stack level.
2925 Cleanups are allowed, though. */
2926 if (dont_jump_in > 0
2927 || (dont_jump_in == 0 && thisblock->data.block.stack_level != 0))
2929 struct label_chain *chain;
2931 /* Any labels in this block are no longer valid to go to.
2932 Mark them to cause an error message. */
2933 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2935 DECL_TOO_LATE (chain->label) = 1;
2936 /* If any goto without a fixup came to this label,
2937 that must be an error, because gotos without fixups
2938 come from outside all saved stack-levels. */
2939 if (TREE_ADDRESSABLE (chain->label))
2940 error ("%Jlabel '%D' used before containing binding contour",
2941 chain->label, chain->label);
2945 /* Restore stack level in effect before the block
2946 (only if variable-size objects allocated). */
2947 /* Perform any cleanups associated with the block. */
2949 if (thisblock->data.block.stack_level != 0
2950 || thisblock->data.block.cleanups != 0)
2952 int reachable;
2953 rtx insn;
2955 /* Only clean up here if this point can actually be reached. */
2956 insn = get_last_insn ();
2957 if (GET_CODE (insn) == NOTE)
2958 insn = prev_nonnote_insn (insn);
2959 reachable = (! insn || GET_CODE (insn) != BARRIER);
2961 /* Do the cleanups. */
2962 expand_cleanups (thisblock->data.block.cleanups, 0, reachable);
2963 if (reachable)
2964 do_pending_stack_adjust ();
2966 /* Restore the stack level. */
2968 if (reachable && thisblock->data.block.stack_level != 0)
2970 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2971 thisblock->data.block.stack_level, NULL_RTX);
2972 if (cfun->nonlocal_goto_save_area)
2973 update_nonlocal_goto_save_area ();
2976 /* Any gotos out of this block must also do these things.
2977 Also report any gotos with fixups that came to labels in this
2978 level. */
2979 fixup_gotos (thisblock,
2980 thisblock->data.block.stack_level,
2981 thisblock->data.block.cleanups,
2982 thisblock->data.block.first_insn,
2983 dont_jump_in);
2986 /* Mark the beginning and end of the scope if requested.
2987 We do this now, after running cleanups on the variables
2988 just going out of scope, so they are in scope for their cleanups. */
2990 /* Get rid of the beginning-mark if we don't make an end-mark. */
2991 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2993 /* Restore the temporary level of TARGET_EXPRs. */
2994 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
2996 /* Restore block_stack level for containing block. */
2998 stack_block_stack = thisblock->data.block.innermost_stack_block;
2999 POPSTACK (block_stack);
3001 /* Pop the stack slot nesting and free any slots at this level. */
3002 pop_temp_slots ();
3005 /* Generate code to save the stack pointer at the start of the current block
3006 and set up to restore it on exit. */
3008 void
3009 save_stack_pointer (void)
3011 struct nesting *thisblock = block_stack;
3013 if (thisblock->data.block.stack_level == 0)
3015 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3016 &thisblock->data.block.stack_level,
3017 thisblock->data.block.first_insn);
3018 stack_block_stack = thisblock;
3022 /* Generate RTL for the automatic variable declaration DECL.
3023 (Other kinds of declarations are simply ignored if seen here.) */
3025 void
3026 expand_decl (tree decl)
3028 tree type;
3030 type = TREE_TYPE (decl);
3032 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3033 type in case this node is used in a reference. */
3034 if (TREE_CODE (decl) == CONST_DECL)
3036 DECL_MODE (decl) = TYPE_MODE (type);
3037 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3038 DECL_SIZE (decl) = TYPE_SIZE (type);
3039 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3040 return;
3043 /* Otherwise, only automatic variables need any expansion done. Static and
3044 external variables, and external functions, will be handled by
3045 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3046 nothing. PARM_DECLs are handled in `assign_parms'. */
3047 if (TREE_CODE (decl) != VAR_DECL)
3048 return;
3050 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3051 return;
3053 /* Create the RTL representation for the variable. */
3055 if (type == error_mark_node)
3056 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3058 else if (DECL_SIZE (decl) == 0)
3059 /* Variable with incomplete type. */
3061 rtx x;
3062 if (DECL_INITIAL (decl) == 0)
3063 /* Error message was already done; now avoid a crash. */
3064 x = gen_rtx_MEM (BLKmode, const0_rtx);
3065 else
3066 /* An initializer is going to decide the size of this array.
3067 Until we know the size, represent its address with a reg. */
3068 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3070 set_mem_attributes (x, decl, 1);
3071 SET_DECL_RTL (decl, x);
3073 else if (DECL_MODE (decl) != BLKmode
3074 /* If -ffloat-store, don't put explicit float vars
3075 into regs. */
3076 && !(flag_float_store
3077 && TREE_CODE (type) == REAL_TYPE)
3078 && ! TREE_THIS_VOLATILE (decl)
3079 && ! DECL_NONLOCAL (decl)
3080 && (DECL_REGISTER (decl) || DECL_ARTIFICIAL (decl) || optimize))
3082 /* Automatic variable that can go in a register. */
3083 int unsignedp = TYPE_UNSIGNED (type);
3084 enum machine_mode reg_mode
3085 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3087 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3089 /* Note if the object is a user variable. */
3090 if (!DECL_ARTIFICIAL (decl))
3092 mark_user_reg (DECL_RTL (decl));
3094 /* Trust user variables which have a pointer type to really
3095 be pointers. Do not trust compiler generated temporaries
3096 as our type system is totally busted as it relates to
3097 pointer arithmetic which translates into lots of compiler
3098 generated objects with pointer types, but which are not really
3099 pointers. */
3100 if (POINTER_TYPE_P (type))
3101 mark_reg_pointer (DECL_RTL (decl),
3102 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3105 maybe_set_unchanging (DECL_RTL (decl), decl);
3107 /* If something wants our address, try to use ADDRESSOF. */
3108 if (TREE_ADDRESSABLE (decl))
3109 put_var_into_stack (decl, /*rescan=*/false);
3112 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3113 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3114 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3115 STACK_CHECK_MAX_VAR_SIZE)))
3117 /* Variable of fixed size that goes on the stack. */
3118 rtx oldaddr = 0;
3119 rtx addr;
3120 rtx x;
3122 /* If we previously made RTL for this decl, it must be an array
3123 whose size was determined by the initializer.
3124 The old address was a register; set that register now
3125 to the proper address. */
3126 if (DECL_RTL_SET_P (decl))
3128 if (!MEM_P (DECL_RTL (decl))
3129 || !REG_P (XEXP (DECL_RTL (decl), 0)))
3130 abort ();
3131 oldaddr = XEXP (DECL_RTL (decl), 0);
3134 /* Set alignment we actually gave this decl. */
3135 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3136 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3137 DECL_USER_ALIGN (decl) = 0;
3139 x = assign_temp (decl, 1, 1, 1);
3140 set_mem_attributes (x, decl, 1);
3141 SET_DECL_RTL (decl, x);
3143 if (oldaddr)
3145 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3146 if (addr != oldaddr)
3147 emit_move_insn (oldaddr, addr);
3150 else
3151 /* Dynamic-size object: must push space on the stack. */
3153 rtx address, size, x;
3155 /* Record the stack pointer on entry to the block, if we have
3156 not already done so. */
3157 do_pending_stack_adjust ();
3158 save_stack_pointer ();
3160 /* Compute the variable's size, in bytes. This will expand any
3161 needed SAVE_EXPRs for the first time. */
3162 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3163 free_temp_slots ();
3165 /* Allocate space on the stack for the variable. Note that
3166 DECL_ALIGN says how the variable is to be aligned and we
3167 cannot use it to conclude anything about the alignment of
3168 the size. */
3169 address = allocate_dynamic_stack_space (size, NULL_RTX,
3170 TYPE_ALIGN (TREE_TYPE (decl)));
3172 /* Reference the variable indirect through that rtx. */
3173 x = gen_rtx_MEM (DECL_MODE (decl), address);
3174 set_mem_attributes (x, decl, 1);
3175 SET_DECL_RTL (decl, x);
3178 /* Indicate the alignment we actually gave this variable. */
3179 #ifdef STACK_BOUNDARY
3180 DECL_ALIGN (decl) = STACK_BOUNDARY;
3181 #else
3182 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3183 #endif
3184 DECL_USER_ALIGN (decl) = 0;
3188 /* Emit code to allocate T_SIZE bytes of dynamic stack space for ALLOC. */
3189 void
3190 expand_stack_alloc (tree alloc, tree t_size)
3192 rtx address, dest, size;
3193 tree var, type;
3195 if (TREE_CODE (alloc) != ADDR_EXPR)
3196 abort ();
3197 var = TREE_OPERAND (alloc, 0);
3198 if (TREE_CODE (var) != VAR_DECL)
3199 abort ();
3201 type = TREE_TYPE (var);
3203 /* Compute the variable's size, in bytes. */
3204 size = expand_expr (t_size, NULL_RTX, VOIDmode, 0);
3205 free_temp_slots ();
3207 /* Allocate space on the stack for the variable. */
3208 address = XEXP (DECL_RTL (var), 0);
3209 dest = allocate_dynamic_stack_space (size, address, TYPE_ALIGN (type));
3210 if (dest != address)
3211 emit_move_insn (address, dest);
3213 /* Indicate the alignment we actually gave this variable. */
3214 #ifdef STACK_BOUNDARY
3215 DECL_ALIGN (var) = STACK_BOUNDARY;
3216 #else
3217 DECL_ALIGN (var) = BIGGEST_ALIGNMENT;
3218 #endif
3219 DECL_USER_ALIGN (var) = 0;
3222 /* Emit code to save the current value of the stack pointer. */
3223 rtx
3224 expand_stack_save (void)
3226 rtx ret = NULL_RTX;
3228 do_pending_stack_adjust ();
3229 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
3230 return ret;
3233 /* Emit code to restore a previously saved value of the stack pointer. */
3234 void
3235 expand_stack_restore (tree var)
3237 rtx sa = DECL_RTL (var);
3239 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
3242 /* Emit code to perform the initialization of a declaration DECL. */
3244 void
3245 expand_decl_init (tree decl)
3247 int was_used = TREE_USED (decl);
3249 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
3250 for static decls. */
3251 if (TREE_CODE (decl) == CONST_DECL
3252 || TREE_STATIC (decl))
3253 return;
3255 /* Compute and store the initial value now. */
3257 push_temp_slots ();
3259 if (DECL_INITIAL (decl) == error_mark_node)
3261 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3263 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3264 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3265 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3266 0);
3267 emit_queue ();
3269 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3271 emit_line_note (DECL_SOURCE_LOCATION (decl));
3272 expand_assignment (decl, DECL_INITIAL (decl), 0);
3273 emit_queue ();
3276 /* Don't let the initialization count as "using" the variable. */
3277 TREE_USED (decl) = was_used;
3279 /* Free any temporaries we made while initializing the decl. */
3280 preserve_temp_slots (NULL_RTX);
3281 free_temp_slots ();
3282 pop_temp_slots ();
3285 /* CLEANUP is an expression to be executed at exit from this binding contour;
3286 for example, in C++, it might call the destructor for this variable.
3288 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3289 CLEANUP multiple times, and have the correct semantics. This
3290 happens in exception handling, for gotos, returns, and breaks
3291 that leave the current scope.
3293 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3294 that is not associated with any particular variable. */
3296 int
3297 expand_decl_cleanup (tree decl, tree cleanup)
3299 struct nesting *thisblock;
3301 /* Error if we are not in any block. */
3302 if (cfun == 0 || block_stack == 0)
3303 return 0;
3305 thisblock = block_stack;
3307 /* Record the cleanup if there is one. */
3309 if (cleanup != 0)
3311 tree t;
3312 rtx seq;
3313 tree *cleanups = &thisblock->data.block.cleanups;
3314 int cond_context = conditional_context ();
3316 if (cond_context)
3318 rtx flag = gen_reg_rtx (word_mode);
3319 rtx set_flag_0;
3320 tree cond;
3322 start_sequence ();
3323 emit_move_insn (flag, const0_rtx);
3324 set_flag_0 = get_insns ();
3325 end_sequence ();
3327 thisblock->data.block.last_unconditional_cleanup
3328 = emit_insn_after (set_flag_0,
3329 thisblock->data.block.last_unconditional_cleanup);
3331 emit_move_insn (flag, const1_rtx);
3333 cond = build_decl (VAR_DECL, NULL_TREE,
3334 lang_hooks.types.type_for_mode (word_mode, 1));
3335 SET_DECL_RTL (cond, flag);
3337 /* Conditionalize the cleanup. */
3338 cleanup = build (COND_EXPR, void_type_node,
3339 lang_hooks.truthvalue_conversion (cond),
3340 cleanup, integer_zero_node);
3341 cleanup = fold (cleanup);
3343 cleanups = &thisblock->data.block.cleanups;
3346 cleanup = unsave_expr (cleanup);
3348 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
3350 if (! cond_context)
3351 /* If this block has a cleanup, it belongs in stack_block_stack. */
3352 stack_block_stack = thisblock;
3354 if (cond_context)
3356 start_sequence ();
3359 if (! using_eh_for_cleanups_p)
3360 TREE_ADDRESSABLE (t) = 1;
3361 else
3362 expand_eh_region_start ();
3364 if (cond_context)
3366 seq = get_insns ();
3367 end_sequence ();
3368 if (seq)
3369 thisblock->data.block.last_unconditional_cleanup
3370 = emit_insn_after (seq,
3371 thisblock->data.block.last_unconditional_cleanup);
3373 else
3375 thisblock->data.block.last_unconditional_cleanup
3376 = get_last_insn ();
3377 /* When we insert instructions after the last unconditional cleanup,
3378 we don't adjust last_insn. That means that a later add_insn will
3379 clobber the instructions we've just added. The easiest way to
3380 fix this is to just insert another instruction here, so that the
3381 instructions inserted after the last unconditional cleanup are
3382 never the last instruction. */
3383 emit_note (NOTE_INSN_DELETED);
3386 return 1;
3389 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
3390 is thrown. */
3392 int
3393 expand_decl_cleanup_eh (tree decl, tree cleanup, int eh_only)
3395 int ret = expand_decl_cleanup (decl, cleanup);
3396 if (cleanup && ret)
3398 tree node = block_stack->data.block.cleanups;
3399 CLEANUP_EH_ONLY (node) = eh_only;
3401 return ret;
3404 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3405 DECL_ELTS is the list of elements that belong to DECL's type.
3406 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3408 void
3409 expand_anon_union_decl (tree decl, tree cleanup, tree decl_elts)
3411 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
3412 rtx x;
3413 tree t;
3415 /* If any of the elements are addressable, so is the entire union. */
3416 for (t = decl_elts; t; t = TREE_CHAIN (t))
3417 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
3419 TREE_ADDRESSABLE (decl) = 1;
3420 break;
3423 expand_decl (decl);
3424 expand_decl_cleanup (decl, cleanup);
3425 x = DECL_RTL (decl);
3427 /* Go through the elements, assigning RTL to each. */
3428 for (t = decl_elts; t; t = TREE_CHAIN (t))
3430 tree decl_elt = TREE_VALUE (t);
3431 tree cleanup_elt = TREE_PURPOSE (t);
3432 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3434 /* If any of the elements are used, so is the entire
3435 union. */
3436 if (TREE_USED (decl_elt))
3437 TREE_USED (decl) = 1;
3439 /* Propagate the union's alignment to the elements. */
3440 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3441 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
3443 /* If the element has BLKmode and the union doesn't, the union is
3444 aligned such that the element doesn't need to have BLKmode, so
3445 change the element's mode to the appropriate one for its size. */
3446 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3447 DECL_MODE (decl_elt) = mode
3448 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
3450 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3451 instead create a new MEM rtx with the proper mode. */
3452 if (MEM_P (x))
3454 if (mode == GET_MODE (x))
3455 SET_DECL_RTL (decl_elt, x);
3456 else
3457 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
3459 else if (REG_P (x))
3461 if (mode == GET_MODE (x))
3462 SET_DECL_RTL (decl_elt, x);
3463 else
3464 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
3466 else
3467 abort ();
3469 /* Record the cleanup if there is one. */
3471 if (cleanup != 0)
3472 thisblock->data.block.cleanups
3473 = tree_cons (decl_elt, cleanup_elt,
3474 thisblock->data.block.cleanups);
3478 /* Expand a list of cleanups LIST.
3479 Elements may be expressions or may be nested lists.
3481 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
3482 goto and handle protection regions specially in that case.
3484 If REACHABLE, we emit code, otherwise just inform the exception handling
3485 code about this finalization. */
3487 static void
3488 expand_cleanups (tree list, int in_fixup, int reachable)
3490 tree tail;
3491 for (tail = list; tail; tail = TREE_CHAIN (tail))
3492 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3493 expand_cleanups (TREE_VALUE (tail), in_fixup, reachable);
3494 else
3496 if (! in_fixup && using_eh_for_cleanups_p)
3497 expand_eh_region_end_cleanup (TREE_VALUE (tail));
3499 if (reachable && !CLEANUP_EH_ONLY (tail))
3501 /* Cleanups may be run multiple times. For example,
3502 when exiting a binding contour, we expand the
3503 cleanups associated with that contour. When a goto
3504 within that binding contour has a target outside that
3505 contour, it will expand all cleanups from its scope to
3506 the target. Though the cleanups are expanded multiple
3507 times, the control paths are non-overlapping so the
3508 cleanups will not be executed twice. */
3510 /* We may need to protect from outer cleanups. */
3511 if (in_fixup && using_eh_for_cleanups_p)
3513 expand_eh_region_start ();
3515 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3517 expand_eh_region_end_fixup (TREE_VALUE (tail));
3519 else
3520 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3522 free_temp_slots ();
3527 /* Mark the context we are emitting RTL for as a conditional
3528 context, so that any cleanup actions we register with
3529 expand_decl_cleanup will be properly conditionalized when those
3530 cleanup actions are later performed. Must be called before any
3531 expression (tree) is expanded that is within a conditional context. */
3533 void
3534 start_cleanup_deferral (void)
3536 /* block_stack can be NULL if we are inside the parameter list. It is
3537 OK to do nothing, because cleanups aren't possible here. */
3538 if (block_stack)
3539 ++block_stack->data.block.conditional_code;
3542 /* Mark the end of a conditional region of code. Because cleanup
3543 deferrals may be nested, we may still be in a conditional region
3544 after we end the currently deferred cleanups; only after we end all
3545 deferred cleanups are we back in unconditional code. */
3547 void
3548 end_cleanup_deferral (void)
3550 /* block_stack can be NULL if we are inside the parameter list. It is
3551 OK to do nothing, because cleanups aren't possible here. */
3552 if (block_stack)
3553 --block_stack->data.block.conditional_code;
3556 tree
3557 last_cleanup_this_contour (void)
3559 if (block_stack == 0)
3560 return 0;
3562 return block_stack->data.block.cleanups;
3566 /* Return nonzero if any containing block has a stack level or
3567 cleanups. */
3569 int
3570 containing_blocks_have_cleanups_or_stack_level (void)
3572 struct nesting *block;
3574 for (block = block_stack; block; block = block->next)
3575 if (block->data.block.stack_level != 0
3576 || block->data.block.cleanups != 0)
3577 return 1;
3579 return 0;
3582 /* Return 1 if there are any pending cleanups at this point.
3583 Check the current contour as well as contours that enclose
3584 the current contour. */
3586 int
3587 any_pending_cleanups (void)
3589 struct nesting *block;
3591 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
3592 return 0;
3594 if (block_stack->data.block.cleanups != NULL)
3595 return 1;
3597 if (block_stack->data.block.outer_cleanups == 0)
3598 return 0;
3600 for (block = block_stack->next; block; block = block->next)
3601 if (block->data.block.cleanups != 0)
3602 return 1;
3604 return 0;
3607 /* Enter a case (Pascal) or switch (C) statement.
3608 Push a block onto case_stack and nesting_stack
3609 to accumulate the case-labels that are seen
3610 and to record the labels generated for the statement.
3612 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3613 Otherwise, this construct is transparent for `exit_something'.
3615 EXPR is the index-expression to be dispatched on.
3616 TYPE is its nominal type. We could simply convert EXPR to this type,
3617 but instead we take short cuts. */
3619 void
3620 expand_start_case (int exit_flag, tree expr, tree type,
3621 const char *printname)
3623 struct nesting *thiscase = ALLOC_NESTING ();
3625 /* Make an entry on case_stack for the case we are entering. */
3627 thiscase->desc = CASE_NESTING;
3628 thiscase->next = case_stack;
3629 thiscase->all = nesting_stack;
3630 thiscase->depth = ++nesting_depth;
3631 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3632 thiscase->data.case_stmt.case_list = 0;
3633 thiscase->data.case_stmt.index_expr = expr;
3634 thiscase->data.case_stmt.nominal_type = type;
3635 thiscase->data.case_stmt.default_label = 0;
3636 thiscase->data.case_stmt.printname = printname;
3637 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
3638 case_stack = thiscase;
3639 nesting_stack = thiscase;
3641 do_pending_stack_adjust ();
3642 emit_queue ();
3644 /* Make sure case_stmt.start points to something that won't
3645 need any transformation before expand_end_case. */
3646 if (GET_CODE (get_last_insn ()) != NOTE)
3647 emit_note (NOTE_INSN_DELETED);
3649 thiscase->data.case_stmt.start = get_last_insn ();
3651 start_cleanup_deferral ();
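/* Illustrative call sequence (hypothetical front-end code, not taken
   from any real front end): to expand

       switch (x) { case 1: ...; default: ...; }

   a parser would do roughly

       expand_start_case (1, x_tree, integer_type_node, "switch");
       pushcase (build_int_2 (1, 0), convert, case1_label, &dup);
       ... expand the statements of the case ...
       pushcase (NULL_TREE, convert, default_label, &dup);
       ... expand the default statements ...
       expand_end_case_type (x_tree, NULL_TREE);

   where `convert' is the language's conversion routine and the labels
   are LABEL_DECLs.  */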
3654 /* Accumulate one case or default label inside a case or switch statement.
3655 VALUE is the value of the case (a null pointer, for a default label).
3656 The function CONVERTER, when applied to arguments T and V,
3657 converts the value V to the type T.
3659 If not currently inside a case or switch statement, return 1 and do
3660 nothing. The caller will print a language-specific error message.
3661 If VALUE is a duplicate or overlaps, return 2 and do nothing
3662 except store the (first) duplicate node in *DUPLICATE.
3663 If VALUE is out of range, return 3 and do nothing.
3664 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3665 Return 0 on success.
3667 Extended to handle range statements. */
3669 int
3670 pushcase (tree value, tree (*converter) (tree, tree), tree label,
3671 tree *duplicate)
3673 tree index_type;
3674 tree nominal_type;
3676 /* Fail if not inside a real case statement. */
3677 if (! (case_stack && case_stack->data.case_stmt.start))
3678 return 1;
3680 if (stack_block_stack
3681 && stack_block_stack->depth > case_stack->depth)
3682 return 5;
3684 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3685 nominal_type = case_stack->data.case_stmt.nominal_type;
3687 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3688 if (index_type == error_mark_node)
3689 return 0;
3691 /* Convert VALUE to the type in which the comparisons are nominally done. */
3692 if (value != 0)
3693 value = (*converter) (nominal_type, value);
3695 /* Fail if this value is out of range for the actual type of the index
3696 (which may be narrower than NOMINAL_TYPE). */
3697 if (value != 0
3698 && (TREE_CONSTANT_OVERFLOW (value)
3699 || ! int_fits_type_p (value, index_type)))
3700 return 3;
3702 return add_case_node (value, value, label, duplicate, false);
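/* Sketch of caller-side handling of the return codes documented above
   (hypothetical; real front ends word their diagnostics differently):

       switch (pushcase (value, convert, label, &duplicate))
	 {
	 case 1:
	   error ("case label not within a switch statement");
	   break;
	 case 2:
	   error ("duplicate case value");
	   break;
	 case 3:
	   error ("case value out of range");
	   break;
	 case 5:
	   error ("case label within scope of cleanup or variable array");
	   break;
	 }  */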
3705 /* Like pushcase but this case applies to all values between VALUE1 and
3706 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
3707 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
3708 starts at VALUE1 and ends at the highest value of the index type.
3709 If both are NULL, this case applies to all values.
3711 The return value is the same as that of pushcase but there is one
3712 additional error code: 4 means the specified range was empty. */
3714 int
3715 pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
3716 tree label, tree *duplicate)
3718 tree index_type;
3719 tree nominal_type;
3721 /* Fail if not inside a real case statement. */
3722 if (! (case_stack && case_stack->data.case_stmt.start))
3723 return 1;
3725 if (stack_block_stack
3726 && stack_block_stack->depth > case_stack->depth)
3727 return 5;
3729 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3730 nominal_type = case_stack->data.case_stmt.nominal_type;
3732 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3733 if (index_type == error_mark_node)
3734 return 0;
3736 /* Convert VALUEs to the type in which the comparisons are nominally done
3737 and replace any unspecified value with the corresponding bound. */
3738 if (value1 == 0)
3739 value1 = TYPE_MIN_VALUE (index_type);
3740 if (value2 == 0)
3741 value2 = TYPE_MAX_VALUE (index_type);
3743 /* Fail if the range is empty. Do this before any conversion since
3744 we want to allow out-of-range empty ranges. */
3745 if (value2 != 0 && tree_int_cst_lt (value2, value1))
3746 return 4;
3748 /* If the max was unbounded, use the max of the nominal_type we are
3749 converting to. Do this after the < check above to suppress false
3750 positives. */
3751 if (value2 == 0)
3752 value2 = TYPE_MAX_VALUE (nominal_type);
3754 value1 = (*converter) (nominal_type, value1);
3755 value2 = (*converter) (nominal_type, value2);
3757 /* Fail if these values are out of range. */
3758 if (TREE_CONSTANT_OVERFLOW (value1)
3759 || ! int_fits_type_p (value1, index_type))
3760 return 3;
3762 if (TREE_CONSTANT_OVERFLOW (value2)
3763 || ! int_fits_type_p (value2, index_type))
3764 return 3;
3766 return add_case_node (value1, value2, label, duplicate, false);
3769 /* Do the actual insertion of a case label for pushcase and pushcase_range
3770 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
3771 slowdown for large switch statements. */
3773 static int
3774 add_case_node (tree low, tree high, tree label, tree *duplicate,
3775 bool dont_expand_label)
3777 struct case_node *p, **q, *r;
3779 /* If there's no HIGH value, then this is not a case range; it's
3780 just a simple case label. But that's just a degenerate case
3781 range. */
3782 if (!high)
3783 high = low;
3785 /* Handle default labels specially. */
3786 if (!high && !low)
3788 if (case_stack->data.case_stmt.default_label != 0)
3790 *duplicate = case_stack->data.case_stmt.default_label;
3791 return 2;
3793 case_stack->data.case_stmt.default_label = label;
3794 if (!dont_expand_label)
3795 expand_label (label);
3796 return 0;
3799 q = &case_stack->data.case_stmt.case_list;
3800 p = *q;
3802 while ((r = *q))
3804 p = r;
3806 /* Keep going past elements distinctly greater than HIGH. */
3807 if (tree_int_cst_lt (high, p->low))
3808 q = &p->left;
3810 /* or distinctly less than LOW. */
3811 else if (tree_int_cst_lt (p->high, low))
3812 q = &p->right;
3814 else
3816 /* We have an overlap; this is an error. */
3817 *duplicate = p->code_label;
3818 return 2;
3822 /* Add this label to the chain, and succeed. */
3824 r = ggc_alloc (sizeof (struct case_node));
3825 r->low = low;
3827 /* If the bounds are equal, turn this into the one-value case. */
3828 if (tree_int_cst_equal (low, high))
3829 r->high = r->low;
3830 else
3831 r->high = high;
3833 r->code_label = label;
3834 if (!dont_expand_label)
3835 expand_label (label);
3837 *q = r;
3838 r->parent = p;
3839 r->left = 0;
3840 r->right = 0;
3841 r->balance = 0;
3843 while (p)
3845 struct case_node *s;
3847 if (r == p->left)
3849 int b;
3851 if (! (b = p->balance))
3852 /* Growth propagation from left side. */
3853 p->balance = -1;
3854 else if (b < 0)
3856 if (r->balance < 0)
3858 /* R-Rotation */
3859 if ((p->left = s = r->right))
3860 s->parent = p;
3862 r->right = p;
3863 p->balance = 0;
3864 r->balance = 0;
3865 s = p->parent;
3866 p->parent = r;
3868 if ((r->parent = s))
3870 if (s->left == p)
3871 s->left = r;
3872 else
3873 s->right = r;
3875 else
3876 case_stack->data.case_stmt.case_list = r;
3878 else
3879 /* r->balance == +1 */
3881 /* LR-Rotation */
3883 int b2;
3884 struct case_node *t = r->right;
3886 if ((p->left = s = t->right))
3887 s->parent = p;
3889 t->right = p;
3890 if ((r->right = s = t->left))
3891 s->parent = r;
3893 t->left = r;
3894 b = t->balance;
3895 b2 = b < 0;
3896 p->balance = b2;
3897 b2 = -b2 - b;
3898 r->balance = b2;
3899 t->balance = 0;
3900 s = p->parent;
3901 p->parent = t;
3902 r->parent = t;
3904 if ((t->parent = s))
3906 if (s->left == p)
3907 s->left = t;
3908 else
3909 s->right = t;
3911 else
3912 case_stack->data.case_stmt.case_list = t;
3914 break;
3917 else
3919 /* p->balance == +1; growth of left side balances the node. */
3920 p->balance = 0;
3921 break;
3924 else
3925 /* r == p->right */
3927 int b;
3929 if (! (b = p->balance))
3930 /* Growth propagation from right side. */
3931 p->balance++;
3932 else if (b > 0)
3934 if (r->balance > 0)
3936 /* L-Rotation */
3938 if ((p->right = s = r->left))
3939 s->parent = p;
3941 r->left = p;
3942 p->balance = 0;
3943 r->balance = 0;
3944 s = p->parent;
3945 p->parent = r;
3946 if ((r->parent = s))
3948 if (s->left == p)
3949 s->left = r;
3950 else
3951 s->right = r;
3954 else
3955 case_stack->data.case_stmt.case_list = r;
3958 else
3959 /* r->balance == -1 */
3961 /* RL-Rotation */
3962 int b2;
3963 struct case_node *t = r->left;
3965 if ((p->right = s = t->left))
3966 s->parent = p;
3968 t->left = p;
3970 if ((r->left = s = t->right))
3971 s->parent = r;
3973 t->right = r;
3974 b = t->balance;
3975 b2 = b < 0;
3976 r->balance = b2;
3977 b2 = -b2 - b;
3978 p->balance = b2;
3979 t->balance = 0;
3980 s = p->parent;
3981 p->parent = t;
3982 r->parent = t;
3984 if ((t->parent = s))
3986 if (s->left == p)
3987 s->left = t;
3988 else
3989 s->right = t;
3992 else
3993 case_stack->data.case_stmt.case_list = t;
3995 break;
3997 else
3999 /* p->balance == -1; growth of right side balances the node. */
4000 p->balance = 0;
4001 break;
4005 r = p;
4006 p = p->parent;
4009 return 0;
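/* Worked example of the rebalancing above (illustrative): inserting
   the case values 1, 2 and 3 in that order first chains them to the
   right.  When the growth from inserting 3 propagates up to node 1,
   which already leans right, the L-rotation fires and promotes 2:

	1                  2
	 \                / \
	  2      ==>     1   3
	   \
	    3

   so the tree stays O(log n) deep even for the common case of case
   labels written in ascending order.  */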
4012 /* Maximum number of case bit tests. */
4013 #define MAX_CASE_BIT_TESTS 3
4015 /* By default, enable case bit tests on targets with ashlsi3. */
4016 #ifndef CASE_USE_BIT_TESTS
4017 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
4018 != CODE_FOR_nothing)
4019 #endif
4022 /* A case_bit_test represents a set of case nodes that may be
4023 selected from using a bit-wise comparison. HI and LO hold
4024 the integer to be tested against, LABEL contains the label
4025 to jump to upon success and BITS counts the number of case
4026 nodes handled by this test, typically the number of bits
4027 set in HI:LO. */
4029 struct case_bit_test
4031 HOST_WIDE_INT hi;
4032 HOST_WIDE_INT lo;
4033 rtx label;
4034 int bits;
4037 /* Determine whether "1 << x" is relatively cheap in word_mode. */
4039 static bool
4040 lshift_cheap_p (void)
4042 static bool init = false;
4043 static bool cheap = true;
4045 if (!init)
4047 rtx reg = gen_rtx_REG (word_mode, 10000);
4048 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
4049 cheap = cost < COSTS_N_INSNS (3);
4050 init = true;
4053 return cheap;
4056 /* Comparison function for qsort to order bit tests by decreasing
4057 number of case nodes, i.e. the node with the most cases gets
4058 tested first. */
4060 static int
4061 case_bit_test_cmp (const void *p1, const void *p2)
4063 const struct case_bit_test *d1 = p1;
4064 const struct case_bit_test *d2 = p2;
4066 return d2->bits - d1->bits;
4069 /* Expand a switch statement by a short sequence of bit-wise
4070 comparisons. "switch(x)" is effectively converted into
4071 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
4072 integer constants.
4074 INDEX_EXPR is the value being switched on, which is of
4075 type INDEX_TYPE. MINVAL is the lowest case value in
4076 the case nodes, of INDEX_TYPE type, and RANGE is the highest
4077 value minus MINVAL, also of type INDEX_TYPE. NODES is
4078 the set of case nodes, and DEFAULT_LABEL is the label to
4079 branch to should none of the cases match.
4081 There *MUST* be MAX_CASE_BIT_TESTS or fewer unique case
4082 node targets. */
4084 static void
4085 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
4086 tree range, case_node_ptr nodes, rtx default_label)
4088 struct case_bit_test test[MAX_CASE_BIT_TESTS];
4089 enum machine_mode mode;
4090 rtx expr, index, label;
4091 unsigned int i, j, lo, hi;
4092 struct case_node *n;
4093 unsigned int count;
4095 count = 0;
4096 for (n = nodes; n; n = n->right)
4098 label = label_rtx (n->code_label);
4099 for (i = 0; i < count; i++)
4100 if (same_case_target_p (label, test[i].label))
4101 break;
4103 if (i == count)
4105 if (count >= MAX_CASE_BIT_TESTS)
4106 abort ();
4107 test[i].hi = 0;
4108 test[i].lo = 0;
4109 test[i].label = label;
4110 test[i].bits = 1;
4111 count++;
4113 else
4114 test[i].bits++;
4116 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4117 n->low, minval)), 1);
4118 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4119 n->high, minval)), 1);
4120 for (j = lo; j <= hi; j++)
4121 if (j >= HOST_BITS_PER_WIDE_INT)
4122 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
4123 else
4124 test[i].lo |= (HOST_WIDE_INT) 1 << j;
4127 qsort (test, count, sizeof(*test), case_bit_test_cmp);
4129 index_expr = fold (build (MINUS_EXPR, index_type,
4130 convert (index_type, index_expr),
4131 convert (index_type, minval)));
4132 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4133 emit_queue ();
4134 index = protect_from_queue (index, 0);
4135 do_pending_stack_adjust ();
4137 mode = TYPE_MODE (index_type);
4138 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
4139 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
4140 default_label);
4142 index = convert_to_mode (word_mode, index, 0);
4143 index = expand_binop (word_mode, ashl_optab, const1_rtx,
4144 index, NULL_RTX, 1, OPTAB_WIDEN);
4146 for (i = 0; i < count; i++)
4148 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
4149 expr = expand_binop (word_mode, and_optab, index, expr,
4150 NULL_RTX, 1, OPTAB_WIDEN);
4151 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
4152 word_mode, 1, test[i].label);
4155 emit_jump (default_label);
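/* Worked example (illustrative): for

       switch (c)
	 {
	 case 'a': case 'e': case 'i': case 'o': case 'u':
	   return 1;
	 }

   MINVAL is 'a' and RANGE is 'u' - 'a' == 20, so the code emitted
   above is equivalent to

       unsigned int t = c - 'a';
       if (t > 20)
	 goto default_label;
       if ((1 << t) & 0x104111)	    (bits 0, 4, 8, 14 and 20 set)
	 goto case_label;
       goto default_label;

   One shift, one AND and two branches replace five comparisons.  */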
4158 #ifndef HAVE_casesi
4159 #define HAVE_casesi 0
4160 #endif
4162 #ifndef HAVE_tablejump
4163 #define HAVE_tablejump 0
4164 #endif
4166 /* Terminate a case (Pascal) or switch (C) statement
4167 in which ORIG_INDEX is the expression to be tested.
4168 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
4169 type as given in the source before any compiler conversions.
4170 Generate the code to test it and jump to the right place. */
4172 void
4173 expand_end_case_type (tree orig_index, tree orig_type)
4175 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
4176 rtx default_label = 0;
4177 struct case_node *n, *m;
4178 unsigned int count, uniq;
4179 rtx index;
4180 rtx table_label;
4181 int ncases;
4182 rtx *labelvec;
4183 int i;
4184 rtx before_case, end, lab;
4185 struct nesting *thiscase = case_stack;
4186 tree index_expr, index_type;
4187 bool exit_done = false;
4188 int unsignedp;
4190 /* Don't crash due to previous errors. */
4191 if (thiscase == NULL)
4192 return;
4194 index_expr = thiscase->data.case_stmt.index_expr;
4195 index_type = TREE_TYPE (index_expr);
4196 unsignedp = TYPE_UNSIGNED (index_type);
4197 if (orig_type == NULL)
4198 orig_type = TREE_TYPE (orig_index);
4200 do_pending_stack_adjust ();
4202 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4203 if (index_type != error_mark_node)
4205 /* If we don't have a default-label, create one here,
4206 after the body of the switch. */
4207 if (thiscase->data.case_stmt.default_label == 0)
4209 thiscase->data.case_stmt.default_label
4210 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4211 /* Share the exit label if possible. */
4212 if (thiscase->exit_label)
4214 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
4215 thiscase->exit_label);
4216 exit_done = true;
4218 expand_label (thiscase->data.case_stmt.default_label);
4220 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4222 before_case = get_last_insn ();
4224 if (thiscase->data.case_stmt.case_list
4225 && thiscase->data.case_stmt.case_list->left)
4226 thiscase->data.case_stmt.case_list
4227 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
4229 /* Simplify the case-list before we count it. */
4230 group_case_nodes (thiscase->data.case_stmt.case_list);
4231 strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
4232 default_label);
4234 /* Get upper and lower bounds of case values.
4235 Also convert all the case values to the index expr's data type. */
4237 uniq = 0;
4238 count = 0;
4239 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4241 /* Check low and high label values are integers. */
4242 if (TREE_CODE (n->low) != INTEGER_CST)
4243 abort ();
4244 if (TREE_CODE (n->high) != INTEGER_CST)
4245 abort ();
4247 n->low = convert (index_type, n->low);
4248 n->high = convert (index_type, n->high);
4250 /* Count the elements and track the largest and smallest
4251 of them (treating them as signed even if they are not). */
4252 if (count++ == 0)
4254 minval = n->low;
4255 maxval = n->high;
4257 else
4259 if (INT_CST_LT (n->low, minval))
4260 minval = n->low;
4261 if (INT_CST_LT (maxval, n->high))
4262 maxval = n->high;
4264 /* A range counts double, since it requires two compares. */
4265 if (! tree_int_cst_equal (n->low, n->high))
4266 count++;
4268 /* Count the number of unique case node targets. */
4269 uniq++;
4270 lab = label_rtx (n->code_label);
4271 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
4272 if (same_case_target_p (label_rtx (m->code_label), lab))
4274 uniq--;
4275 break;
4279 /* Compute span of values. */
4280 if (count != 0)
4281 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
4283 end_cleanup_deferral ();
4285 if (count == 0)
4287 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
4288 emit_queue ();
4289 emit_jump (default_label);
4292 /* Try implementing this switch statement by a short sequence of
4293 bit-wise comparisons. However, we let the binary-tree case
4294 below handle constant index expressions. */
4295 else if (CASE_USE_BIT_TESTS
4296 && ! TREE_CONSTANT (index_expr)
4297 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
4298 && compare_tree_int (range, 0) > 0
4299 && lshift_cheap_p ()
4300 && ((uniq == 1 && count >= 3)
4301 || (uniq == 2 && count >= 5)
4302 || (uniq == 3 && count >= 6)))
4304 /* If all the case values fit in a word even without
4305 subtracting MINVAL, the subtraction can be omitted
4306 entirely. */
4307 if (compare_tree_int (minval, 0) > 0
4308 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
4310 minval = integer_zero_node;
4311 range = maxval;
4313 emit_case_bit_tests (index_type, index_expr, minval, range,
4314 thiscase->data.case_stmt.case_list,
4315 default_label);
4318 /* If range of values is much bigger than number of values,
4319 make a sequence of conditional branches instead of a dispatch.
4320 If the switch-index is a constant, do it this way
4321 because we can optimize it. */
4323 else if (count < case_values_threshold ()
4324 || compare_tree_int (range,
4325 (optimize_size ? 3 : 10) * count) > 0
4326 /* RANGE may be signed, and really large ranges will show up
4327 as negative numbers. */
4328 || compare_tree_int (range, 0) < 0
4329 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
4330 || flag_pic
4331 #endif
4332 || TREE_CONSTANT (index_expr)
4333 /* If neither casesi nor tablejump is available, we can
4334 only go this way. */
4335 || (!HAVE_casesi && !HAVE_tablejump))
4337 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4339 /* If the index is a short or char for which we do not have
4340 an insn to handle comparisons directly, convert it to
4341 a full integer now, rather than letting each comparison
4342 generate the conversion. */
4344 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
4345 && ! have_insn_for (COMPARE, GET_MODE (index)))
4347 enum machine_mode wider_mode;
4348 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
4349 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4350 if (have_insn_for (COMPARE, wider_mode))
4352 index = convert_to_mode (wider_mode, index, unsignedp);
4353 break;
4357 emit_queue ();
4358 do_pending_stack_adjust ();
4360 index = protect_from_queue (index, 0);
4361 if (MEM_P (index))
4362 index = copy_to_reg (index);
4363 if (GET_CODE (index) == CONST_INT
4364 || TREE_CODE (index_expr) == INTEGER_CST)
4366 /* Make a tree node with the proper constant value
4367 if we don't already have one. */
4368 if (TREE_CODE (index_expr) != INTEGER_CST)
4370 index_expr
4371 = build_int_2 (INTVAL (index),
4372 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
4373 index_expr = convert (index_type, index_expr);
4376 /* For constant index expressions we need only
4377 issue an unconditional branch to the appropriate
4378 target code. The job of removing any unreachable
4379 code is left to the optimization phase if the
4380 "-O" option is specified. */
4381 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4382 if (! tree_int_cst_lt (index_expr, n->low)
4383 && ! tree_int_cst_lt (n->high, index_expr))
4384 break;
4386 if (n)
4387 emit_jump (label_rtx (n->code_label));
4388 else
4389 emit_jump (default_label);
4391 else
4393 /* If the index expression is not constant we generate
4394 a binary decision tree to select the appropriate
4395 target code. This is done as follows:
4397 The list of cases is rearranged into a binary tree,
4398 nearly optimal assuming equal probability for each case.
4400 The tree is transformed into RTL, eliminating
4401 redundant test conditions at the same time.
4403 If program flow could reach the end of the
4404 decision tree an unconditional jump to the
4405 default code is emitted. */
4407 use_cost_table
4408 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
4409 && estimate_case_costs (thiscase->data.case_stmt.case_list));
4410 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
4411 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
4412 default_label, index_type);
4413 emit_jump_if_reachable (default_label);
4416 else
4418 table_label = gen_label_rtx ();
4419 if (! try_casesi (index_type, index_expr, minval, range,
4420 table_label, default_label))
4422 index_type = thiscase->data.case_stmt.nominal_type;
4424 /* Index jumptables from zero for suitable values of
4425 minval to avoid a subtraction. */
4426 if (! optimize_size
4427 && compare_tree_int (minval, 0) > 0
4428 && compare_tree_int (minval, 3) < 0)
4430 minval = integer_zero_node;
4431 range = maxval;
4434 if (! try_tablejump (index_type, index_expr, minval, range,
4435 table_label, default_label))
4436 abort ();
4439 /* Get table of labels to jump to, in order of case index. */
4441 ncases = tree_low_cst (range, 0) + 1;
4442 labelvec = alloca (ncases * sizeof (rtx));
4443 memset (labelvec, 0, ncases * sizeof (rtx));
4445 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4447 /* Compute the low and high bounds relative to the minimum
4448 value since that should fit in a HOST_WIDE_INT while the
4449 actual values may not. */
4450 HOST_WIDE_INT i_low
4451 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4452 n->low, minval)), 1);
4453 HOST_WIDE_INT i_high
4454 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4455 n->high, minval)), 1);
4456 HOST_WIDE_INT i;
4458 for (i = i_low; i <= i_high; i ++)
4459 labelvec[i]
4460 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
4463 /* Fill in the gaps with the default. */
4464 for (i = 0; i < ncases; i++)
4465 if (labelvec[i] == 0)
4466 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
4468 /* Output the table. */
4469 emit_label (table_label);
4471 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
4472 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
4473 gen_rtx_LABEL_REF (Pmode, table_label),
4474 gen_rtvec_v (ncases, labelvec),
4475 const0_rtx, const0_rtx));
4476 else
4477 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
4478 gen_rtvec_v (ncases, labelvec)));
4480 /* If the case insn drops through the table,
4481 after the table we must jump to the default-label.
4482 Otherwise record no drop-through after the table. */
4483 #ifdef CASE_DROPS_THROUGH
4484 emit_jump (default_label);
4485 #else
4486 emit_barrier ();
4487 #endif
4490 before_case = NEXT_INSN (before_case);
4491 end = get_last_insn ();
4492 if (squeeze_notes (&before_case, &end))
4493 abort ();
4494 reorder_insns (before_case, end,
4495 thiscase->data.case_stmt.start);
4497 else
4498 end_cleanup_deferral ();
4500 if (thiscase->exit_label && !exit_done)
4501 emit_label (thiscase->exit_label);
4503 POPSTACK (case_stack);
4505 free_temp_slots ();
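/* Illustration of the dispatch-table path above: for a dense switch
   such as

       switch (x) { case 1: ... case 2: ... case 3: ... case 4: ... }

   MINVAL is 1 and RANGE is 3, so NCASES is 4 and the emitted code is
   roughly

       if ((unsigned) (x - 1) > 3)
	 goto default_label;
       goto *table[x - 1];

   (via casesi or tablejump), followed by an ADDR_VEC, or an
   ADDR_DIFF_VEC when the output is PIC, holding the four label
   references with any gaps filled by the default label.  */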
4508 /* Convert the tree NODE into a list linked by the right field, with the left
4509 field zeroed. RIGHT is used for recursion; it is a list to be placed
4510 rightmost in the resulting list. */
4512 static struct case_node *
4513 case_tree2list (struct case_node *node, struct case_node *right)
4515 struct case_node *left;
4517 if (node->right)
4518 right = case_tree2list (node->right, right);
4520 node->right = right;
4521 if ((left = node->left))
4523 node->left = 0;
4524 return case_tree2list (left, node);
4527 return node;
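/* Illustration: applied to the balanced tree

	  2
	 / \
	1   3

   case_tree2list (root, 0) returns the chain 1 -> 2 -> 3 linked
   through the RIGHT fields with every LEFT field zeroed, which is the
   list shape expand_end_case_type works on.  */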
4530 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
4532 static void
4533 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
4535 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
4537 if (op1 == op2)
4538 emit_jump (label);
4540 else
4541 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
4542 (GET_MODE (op1) == VOIDmode
4543 ? GET_MODE (op2) : GET_MODE (op1)),
4544 unsignedp, label);
4547 /* Not all case values are encountered equally. This function
4548 uses a heuristic to weight case labels, in cases where that
4549 looks like a reasonable thing to do.
4551 Right now, all we try to guess is text, and we establish the
4552 following weights:
4554 chars above space: 16
4555 digits: 16
4556 default: 12
4557 space, punct: 8
4558 tab: 4
4559 newline: 2
4560 other "\" chars: 1
4561 remaining chars: 0
4563 If we find any cases in the switch that are not either -1 or in the range
4564 of valid ASCII characters, or are control characters other than those
4565 commonly used with "\", don't treat this switch as scanning text.
4567 Return 1 if these nodes are suitable for cost estimation, otherwise
4568 return 0. */
4570 static int
4571 estimate_case_costs (case_node_ptr node)
4573 tree min_ascii = integer_minus_one_node;
4574 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
4575 case_node_ptr n;
4576 int i;
4578 /* If we haven't already made the cost table, make it now. Note that the
4579 lower bound of the table is -1, not zero. */
4581 if (! cost_table_initialized)
4583 cost_table_initialized = 1;
4585 for (i = 0; i < 128; i++)
4587 if (ISALNUM (i))
4588 COST_TABLE (i) = 16;
4589 else if (ISPUNCT (i))
4590 COST_TABLE (i) = 8;
4591 else if (ISCNTRL (i))
4592 COST_TABLE (i) = -1;
4595 COST_TABLE (' ') = 8;
4596 COST_TABLE ('\t') = 4;
4597 COST_TABLE ('\0') = 4;
4598 COST_TABLE ('\n') = 2;
4599 COST_TABLE ('\f') = 1;
4600 COST_TABLE ('\v') = 1;
4601 COST_TABLE ('\b') = 1;
4604 /* See if all the case expressions look like text. It is text if the
4605 lowest constant is >= -1 and the highest constant is <= 127. Do all comparisons
4606 as signed arithmetic since we don't want to ever access cost_table with a
4607 value less than -1. Also check that none of the constants in a range
4608 are strange control characters. */
4610 for (n = node; n; n = n->right)
4612 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
4613 return 0;
4615 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
4616 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
4617 if (COST_TABLE (i) < 0)
4618 return 0;
4621 /* All interesting values are within the range of interesting
4622 ASCII characters. */
4623 return 1;
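/* Example of the weights in action (illustrative): for a scanner
   switch over 'a', '0' and '\n', the nodes receive costs 16, 16 and 2,
   so the cost-weighted split in balance_case_nodes below places the
   two likely characters close to the root of the decision tree and
   the rare newline deeper down.  */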
4626 /* Determine whether two case labels branch to the same target. */
4628 static bool
4629 same_case_target_p (rtx l1, rtx l2)
4631 #if 0
4632 rtx i1, i2;
4634 if (l1 == l2)
4635 return true;
4637 i1 = next_real_insn (l1);
4638 i2 = next_real_insn (l2);
4639 if (i1 == i2)
4640 return true;
4642 if (i1 && simplejump_p (i1))
4644 l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
4647 if (i2 && simplejump_p (i2))
4649 l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
4651 #endif
4652 /* When coming from gimple, we usually won't have emitted either
4653 the labels or the body of the switch statement. The job being
4654 done here should be done via jump threading at the tree level.
4655 Cases that go the same place should have the same label. */
4656 return l1 == l2;
4659 /* Delete nodes that branch to the default label from a list of
4660 case nodes. Eg. case 5: default: becomes just default: */
4662 static void
4663 strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
4665 case_node_ptr ptr;
4667 while (*prev)
4669 ptr = *prev;
4670 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
4671 *prev = ptr->right;
4672 else
4673 prev = &ptr->right;
4677 /* Scan an ordered list of case nodes
4678 combining those with consecutive values or ranges.
4680 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
4682 static void
4683 group_case_nodes (case_node_ptr head)
4685 case_node_ptr node = head;
4687 while (node)
4689 rtx lab;
4690 case_node_ptr np = node;
4692 lab = label_rtx (node->code_label);
4694 /* Try to group the successors of NODE with NODE. */
4695 while (((np = np->right) != 0)
4696 /* Do they jump to the same place? */
4697 && same_case_target_p (label_rtx (np->code_label), lab)
4698 /* Are their ranges consecutive? */
4699 && tree_int_cst_equal (np->low,
4700 fold (build (PLUS_EXPR,
4701 TREE_TYPE (node->high),
4702 node->high,
4703 integer_one_node)))
4704 /* An overflow is not consecutive. */
4705 && tree_int_cst_lt (node->high,
4706 fold (build (PLUS_EXPR,
4707 TREE_TYPE (node->high),
4708 node->high,
4709 integer_one_node))))
4711 node->high = np->high;
4713 /* NP is the first node after NODE which can't be grouped with it.
4714 Delete the nodes in between, and move on to that node. */
4715 node->right = np;
4716 node = np;
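/* Example: the three entries

       case 1:  case 2:  case 3:  goto same_target;

   arrive as separate nodes 1, 2 and 3.  They share a target and their
   values are consecutive, so the loop above merges them into a single
   1..3 node; the decision tree then emits one range check instead of
   three equality tests.  */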
4720 /* Take an ordered list of case nodes
4721 and transform them into a near optimal binary tree,
4722 on the assumption that any target code selection value is as
4723 likely as any other.
4725 The transformation is performed by splitting the ordered
4726 list into two equal sections plus a pivot. The parts are
4727 then attached to the pivot as left and right branches. Each
4728 branch is then transformed recursively. */
4730 static void
4731 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
4733 case_node_ptr np;
4735 np = *head;
4736 if (np)
4738 int cost = 0;
4739 int i = 0;
4740 int ranges = 0;
4741 case_node_ptr *npp;
4742 case_node_ptr left;
4744 /* Count the number of entries on branch. Also count the ranges. */
4746 while (np)
4748 if (!tree_int_cst_equal (np->low, np->high))
4750 ranges++;
4751 if (use_cost_table)
4752 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
4755 if (use_cost_table)
4756 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
4758 i++;
4759 np = np->right;
4762 if (i > 2)
4764 /* Split this list if it is long enough for that to help. */
4765 npp = head;
4766 left = *npp;
4767 if (use_cost_table)
4769 /* Find the place in the list that bisects the list's total cost;
4770 here I gets half the total cost. */
4771 int n_moved = 0;
4772 i = (cost + 1) / 2;
4773 while (1)
4775 /* Skip nodes while their cost does not reach that amount. */
4776 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4777 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
4778 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
4779 if (i <= 0)
4780 break;
4781 npp = &(*npp)->right;
4782 n_moved += 1;
4784 if (n_moved == 0)
4786 /* Leave this branch lopsided, but optimize left-hand
4787 side and fill in `parent' fields for right-hand side. */
4788 np = *head;
4789 np->parent = parent;
4790 balance_case_nodes (&np->left, np);
4791 for (; np->right; np = np->right)
4792 np->right->parent = np;
4793 return;
4796 /* If there are just three nodes, split at the middle one. */
4797 else if (i == 3)
4798 npp = &(*npp)->right;
4799 else
4801 /* Find the place in the list that bisects the list's total cost,
4802 where ranges count as 2.
4803 Here I gets half the total cost. */
4804 i = (i + ranges + 1) / 2;
4805 while (1)
4807 /* Skip nodes while their cost does not reach that amount. */
4808 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4809 i--;
4810 i--;
4811 if (i <= 0)
4812 break;
4813 npp = &(*npp)->right;
4816 *head = np = *npp;
4817 *npp = 0;
4818 np->parent = parent;
4819 np->left = left;
4821 /* Optimize each of the two split parts. */
4822 balance_case_nodes (&np->left, np);
4823 balance_case_nodes (&np->right, np);
4825 else
4827 /* Else leave this branch as one level,
4828 but fill in `parent' fields. */
4829 np = *head;
4830 np->parent = parent;
4831 for (; np->right; np = np->right)
4832 np->right->parent = np;
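/* Illustration (no cost table): for the ordered list 1 .. 7 the split
   point lands on 4, which becomes the root; 1..3 and 5..7 are then
   balanced recursively:

	       4
	     /   \
	    2     6
	   / \   / \
	  1   3 5   7

   so any index value is resolved after visiting at most three
   nodes.  */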
4837 /* Search the parent sections of the case node tree
4838 to see if a test for the lower bound of NODE would be redundant.
4839 INDEX_TYPE is the type of the index expression.
4841 The instructions to generate the case decision tree are
4842 output in the same order as nodes are processed, so it is
4843 known that if a parent node has already checked the value one
4844 below the current node's low bound, the current node is bounded
4845 at its lower end. Thus the test would be redundant. */
4847 static int
4848 node_has_low_bound (case_node_ptr node, tree index_type)
4850 tree low_minus_one;
4851 case_node_ptr pnode;
4853 /* If the lower bound of this node is the lowest value in the index type,
4854 we need not test it. */
4856 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
4857 return 1;
4859 /* If this node has a left branch, the value at the left must be less
4860 than that at this node, so it cannot be bounded at the bottom and
4861 we need not bother testing any further. */
4863 if (node->left)
4864 return 0;
4866 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
4867 node->low, integer_one_node));
4869 /* If the subtraction above overflowed, we can't verify anything.
4870 Otherwise, look for a parent that tests our value - 1. */
4872 if (! tree_int_cst_lt (low_minus_one, node->low))
4873 return 0;
4875 for (pnode = node->parent; pnode; pnode = pnode->parent)
4876 if (tree_int_cst_equal (low_minus_one, pnode->high))
4877 return 1;
4879 return 0;
4882 /* Search the parent sections of the case node tree
4883 to see if a test for the upper bound of NODE would be redundant.
4884 INDEX_TYPE is the type of the index expression.
4886 The instructions to generate the case decision tree are
4887 output in the same order as nodes are processed, so it is
4888 known that if a parent node has already checked the value one
4889 above the current node's high bound, the current node is bounded
4890 at its upper end. Thus the test would be redundant. */
4892 static int
4893 node_has_high_bound (case_node_ptr node, tree index_type)
4895 tree high_plus_one;
4896 case_node_ptr pnode;
4898 /* If there is no upper bound, obviously no test is needed. */
4900 if (TYPE_MAX_VALUE (index_type) == NULL)
4901 return 1;
4903 /* If the upper bound of this node is the highest value in the type
4904 of the index expression, we need not test against it. */
4906 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
4907 return 1;
4909 /* If this node has a right branch, the value at the right must be greater
4910 than that at this node, so it cannot be bounded at the top and
4911 we need not bother testing any further. */
4913 if (node->right)
4914 return 0;
4916 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
4917 node->high, integer_one_node));
4919 /* If the addition above overflowed, we can't verify anything.
4920 Otherwise, look for a parent that tests our value + 1. */
4922 if (! tree_int_cst_lt (node->high, high_plus_one))
4923 return 0;
4925 for (pnode = node->parent; pnode; pnode = pnode->parent)
4926 if (tree_int_cst_equal (high_plus_one, pnode->low))
4927 return 1;
4929 return 0;
4932 /* Search the parent sections of the
4933 case node tree to see if both tests for the upper and lower
4934 bounds of NODE would be redundant. */
4936 static int
4937 node_is_bounded (case_node_ptr node, tree index_type)
4939 return (node_has_low_bound (node, index_type)
4940 && node_has_high_bound (node, index_type));
4943 /* Emit an unconditional jump to LABEL unless it would be dead code. */
4945 static void
4946 emit_jump_if_reachable (rtx label)
4948 if (GET_CODE (get_last_insn ()) != BARRIER)
4949 emit_jump (label);
4952 /* Emit step-by-step code to select a case for the value of INDEX.
4953 The thus generated decision tree follows the form of the
4954 case-node binary tree NODE, whose nodes represent test conditions.
4955 INDEX_TYPE is the type of the index of the switch.
4957 Care is taken to prune redundant tests from the decision tree
4958 by detecting any boundary conditions already checked by
4959 emitted rtx. (See node_has_high_bound, node_has_low_bound
4960 and node_is_bounded, above.)
4962 Where the test conditions can be shown to be redundant we emit
4963 an unconditional jump to the target code. As a further
4964 optimization, the subordinates of a tree node are examined to
4965 check for bounded nodes. In this case conditional and/or
4966 unconditional jumps as a result of the boundary check for the
4967 current node are arranged to target the subordinate's associated
4968 code for out-of-bound conditions on the current node.
4970 We can assume that when control reaches the code generated here,
4971 the index value has already been compared with the parents
4972 of this node, and determined to be on the same side of each parent
4973 as this node is. Thus, if this node tests for the value 51,
4974 and a parent tested for 52, we don't need to consider
4975 the possibility of a value greater than 51. If another parent
4976 tests for the value 50, then this node need not test anything. */
4978 static void
4979 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
4980 tree index_type)
4982 /* If INDEX has an unsigned type, we must make unsigned branches. */
4983 int unsignedp = TYPE_UNSIGNED (index_type);
4984 enum machine_mode mode = GET_MODE (index);
4985 enum machine_mode imode = TYPE_MODE (index_type);
4987 /* See if our parents have already tested everything for us.
4988 If they have, emit an unconditional jump for this node. */
4989 if (node_is_bounded (node, index_type))
4990 emit_jump (label_rtx (node->code_label));
4992 else if (tree_int_cst_equal (node->low, node->high))
4994 /* Node is single valued. First see if the index expression matches
4995 this node and then check our children, if any. */
4997 do_jump_if_equal (index,
4998 convert_modes (mode, imode,
4999 expand_expr (node->low, NULL_RTX,
5000 VOIDmode, 0),
5001 unsignedp),
5002 label_rtx (node->code_label), unsignedp);
5004 if (node->right != 0 && node->left != 0)
5006 /* This node has children on both sides.
5007 Dispatch to one side or the other
5008 by comparing the index value with this node's value.
5009 If one subtree is bounded, check that one first,
5010 so we can avoid real branches in the tree. */
5012 if (node_is_bounded (node->right, index_type))
5014 emit_cmp_and_jump_insns (index,
5015 convert_modes
5016 (mode, imode,
5017 expand_expr (node->high, NULL_RTX,
5018 VOIDmode, 0),
5019 unsignedp),
5020 GT, NULL_RTX, mode, unsignedp,
5021 label_rtx (node->right->code_label));
5022 emit_case_nodes (index, node->left, default_label, index_type);
5025 else if (node_is_bounded (node->left, index_type))
5027 emit_cmp_and_jump_insns (index,
5028 convert_modes
5029 (mode, imode,
5030 expand_expr (node->high, NULL_RTX,
5031 VOIDmode, 0),
5032 unsignedp),
5033 LT, NULL_RTX, mode, unsignedp,
5034 label_rtx (node->left->code_label));
5035 emit_case_nodes (index, node->right, default_label, index_type);
5038 /* If both children are single-valued cases with no
5039 children, finish up all the work. This way, we can save
5040 one ordered comparison. */
5041 else if (tree_int_cst_equal (node->right->low, node->right->high)
5042 && node->right->left == 0
5043 && node->right->right == 0
5044 && tree_int_cst_equal (node->left->low, node->left->high)
5045 && node->left->left == 0
5046 && node->left->right == 0)
5048 /* Neither node is bounded, and each child is a childless single
5049 value, so simply test the index against each value in turn. */
5051 /* See if the value matches what the right hand side
5052 wants. */
5053 do_jump_if_equal (index,
5054 convert_modes (mode, imode,
5055 expand_expr (node->right->low,
5056 NULL_RTX,
5057 VOIDmode, 0),
5058 unsignedp),
5059 label_rtx (node->right->code_label),
5060 unsignedp);
5062 /* See if the value matches what the left hand side
5063 wants. */
5064 do_jump_if_equal (index,
5065 convert_modes (mode, imode,
5066 expand_expr (node->left->low,
5067 NULL_RTX,
5068 VOIDmode, 0),
5069 unsignedp),
5070 label_rtx (node->left->code_label),
5071 unsignedp);
5074 else
5076 /* Neither node is bounded. First distinguish the two sides;
5077 then emit the code for one side at a time. */
5079 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5081 /* See if the value is on the right. */
5082 emit_cmp_and_jump_insns (index,
5083 convert_modes
5084 (mode, imode,
5085 expand_expr (node->high, NULL_RTX,
5086 VOIDmode, 0),
5087 unsignedp),
5088 GT, NULL_RTX, mode, unsignedp,
5089 label_rtx (test_label));
5091 /* Value must be on the left.
5092 Handle the left-hand subtree. */
5093 emit_case_nodes (index, node->left, default_label, index_type);
5094 /* If left-hand subtree does nothing,
5095 go to default. */
5096 emit_jump_if_reachable (default_label);
5098 /* Code branches here for the right-hand subtree. */
5099 expand_label (test_label);
5100 emit_case_nodes (index, node->right, default_label, index_type);
5104 else if (node->right != 0 && node->left == 0)
5106 /* Here we have a right child but no left, so we issue a conditional
5107 branch to default and process the right child.
5109 Omit the conditional branch to default if it would avoid only one
5110 right child; it costs too much space to save so little time. */
5112 if (node->right->right || node->right->left
5113 || !tree_int_cst_equal (node->right->low, node->right->high))
5115 if (!node_has_low_bound (node, index_type))
5117 emit_cmp_and_jump_insns (index,
5118 convert_modes
5119 (mode, imode,
5120 expand_expr (node->high, NULL_RTX,
5121 VOIDmode, 0),
5122 unsignedp),
5123 LT, NULL_RTX, mode, unsignedp,
5124 default_label);
5127 emit_case_nodes (index, node->right, default_label, index_type);
5129 else
5130 /* We cannot process node->right normally
5131 since we haven't ruled out the numbers less than
5132 this node's value. So handle node->right explicitly. */
5133 do_jump_if_equal (index,
5134 convert_modes
5135 (mode, imode,
5136 expand_expr (node->right->low, NULL_RTX,
5137 VOIDmode, 0),
5138 unsignedp),
5139 label_rtx (node->right->code_label), unsignedp);
5142 else if (node->right == 0 && node->left != 0)
5144 /* Just one subtree, on the left. */
5145 if (node->left->left || node->left->right
5146 || !tree_int_cst_equal (node->left->low, node->left->high))
5148 if (!node_has_high_bound (node, index_type))
5150 emit_cmp_and_jump_insns (index,
5151 convert_modes
5152 (mode, imode,
5153 expand_expr (node->high, NULL_RTX,
5154 VOIDmode, 0),
5155 unsignedp),
5156 GT, NULL_RTX, mode, unsignedp,
5157 default_label);
5160 emit_case_nodes (index, node->left, default_label, index_type);
5162 else
5163 /* We cannot process node->left normally
5164 since we haven't ruled out the numbers greater than
5165 this node's value. So handle node->left explicitly. */
5166 do_jump_if_equal (index,
5167 convert_modes
5168 (mode, imode,
5169 expand_expr (node->left->low, NULL_RTX,
5170 VOIDmode, 0),
5171 unsignedp),
5172 label_rtx (node->left->code_label), unsignedp);
5175 else
5177 /* Node is a range. These cases are very similar to those for a single
5178 value, except that we do not start by testing whether this node
5179 is the one to branch to. */
5181 if (node->right != 0 && node->left != 0)
5183 /* Node has subtrees on both sides.
5184 If the right-hand subtree is bounded,
5185 test for it first, since we can go straight there.
5186 Otherwise, we need to make a branch in the control structure,
5187 then handle the two subtrees. */
5188 tree test_label = 0;
5190 if (node_is_bounded (node->right, index_type))
5191 /* Right hand node is fully bounded so we can eliminate any
5192 testing and branch directly to the target code. */
5193 emit_cmp_and_jump_insns (index,
5194 convert_modes
5195 (mode, imode,
5196 expand_expr (node->high, NULL_RTX,
5197 VOIDmode, 0),
5198 unsignedp),
5199 GT, NULL_RTX, mode, unsignedp,
5200 label_rtx (node->right->code_label));
5201 else
5203 /* Right hand node requires testing.
5204 Branch to a label where we will handle it later. */
5206 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5207 emit_cmp_and_jump_insns (index,
5208 convert_modes
5209 (mode, imode,
5210 expand_expr (node->high, NULL_RTX,
5211 VOIDmode, 0),
5212 unsignedp),
5213 GT, NULL_RTX, mode, unsignedp,
5214 label_rtx (test_label));
5217 /* Value belongs to this node or to the left-hand subtree. */
5219 emit_cmp_and_jump_insns (index,
5220 convert_modes
5221 (mode, imode,
5222 expand_expr (node->low, NULL_RTX,
5223 VOIDmode, 0),
5224 unsignedp),
5225 GE, NULL_RTX, mode, unsignedp,
5226 label_rtx (node->code_label));
5228 /* Handle the left-hand subtree. */
5229 emit_case_nodes (index, node->left, default_label, index_type);
5231 /* If right node had to be handled later, do that now. */
5233 if (test_label)
5235 /* If the left-hand subtree fell through,
5236 don't let it fall into the right-hand subtree. */
5237 emit_jump_if_reachable (default_label);
5239 expand_label (test_label);
5240 emit_case_nodes (index, node->right, default_label, index_type);
5244 else if (node->right != 0 && node->left == 0)
5246 /* Deal with values to the left of this node,
5247 if they are possible. */
5248 if (!node_has_low_bound (node, index_type))
5250 emit_cmp_and_jump_insns (index,
5251 convert_modes
5252 (mode, imode,
5253 expand_expr (node->low, NULL_RTX,
5254 VOIDmode, 0),
5255 unsignedp),
5256 LT, NULL_RTX, mode, unsignedp,
5257 default_label);
5260 /* Value belongs to this node or to the right-hand subtree. */
5262 emit_cmp_and_jump_insns (index,
5263 convert_modes
5264 (mode, imode,
5265 expand_expr (node->high, NULL_RTX,
5266 VOIDmode, 0),
5267 unsignedp),
5268 LE, NULL_RTX, mode, unsignedp,
5269 label_rtx (node->code_label));
5271 emit_case_nodes (index, node->right, default_label, index_type);
5274 else if (node->right == 0 && node->left != 0)
5276 /* Deal with values to the right of this node,
5277 if they are possible. */
5278 if (!node_has_high_bound (node, index_type))
5280 emit_cmp_and_jump_insns (index,
5281 convert_modes
5282 (mode, imode,
5283 expand_expr (node->high, NULL_RTX,
5284 VOIDmode, 0),
5285 unsignedp),
5286 GT, NULL_RTX, mode, unsignedp,
5287 default_label);
5290 /* Value belongs to this node or to the left-hand subtree. */
5292 emit_cmp_and_jump_insns (index,
5293 convert_modes
5294 (mode, imode,
5295 expand_expr (node->low, NULL_RTX,
5296 VOIDmode, 0),
5297 unsignedp),
5298 GE, NULL_RTX, mode, unsignedp,
5299 label_rtx (node->code_label));
5301 emit_case_nodes (index, node->left, default_label, index_type);
5304 else
5306 /* Node has no children so we check low and high bounds to remove
5307 redundant tests. Only one of the bounds can exist,
5308 since otherwise this node is bounded--a case tested already. */
5309 int high_bound = node_has_high_bound (node, index_type);
5310 int low_bound = node_has_low_bound (node, index_type);
5312 if (!high_bound && low_bound)
5314 emit_cmp_and_jump_insns (index,
5315 convert_modes
5316 (mode, imode,
5317 expand_expr (node->high, NULL_RTX,
5318 VOIDmode, 0),
5319 unsignedp),
5320 GT, NULL_RTX, mode, unsignedp,
5321 default_label);
5324 else if (!low_bound && high_bound)
5326 emit_cmp_and_jump_insns (index,
5327 convert_modes
5328 (mode, imode,
5329 expand_expr (node->low, NULL_RTX,
5330 VOIDmode, 0),
5331 unsignedp),
5332 LT, NULL_RTX, mode, unsignedp,
5333 default_label);
5335 else if (!low_bound && !high_bound)
5337 /* Widen LOW and HIGH to the same width as INDEX. */
5338 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
5339 tree low = build1 (CONVERT_EXPR, type, node->low);
5340 tree high = build1 (CONVERT_EXPR, type, node->high);
5341 rtx low_rtx, new_index, new_bound;
5343 /* Instead of doing two branches, emit one unsigned branch for
5344 (index-low) > (high-low). */
5345 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
5346 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
5347 NULL_RTX, unsignedp,
5348 OPTAB_WIDEN);
5349 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
5350 high, low)),
5351 NULL_RTX, mode, 0);
5353 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
5354 mode, 1, default_label);
5357 emit_jump (label_rtx (node->code_label));
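/* The single-branch range test used just above, spelled out
   (illustrative): to check LOW <= x <= HIGH with one comparison,
   compute (unsigned) (x - LOW) and compare it against HIGH - LOW.
   For `case 10 ... 20':

       if ((unsigned) (x - 10) > 10)
	 goto default_label;

   Values below 10 wrap around to very large unsigned numbers and so
   take the branch as well; both bounds are covered by one compare and
   jump.  */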
5362 #include "gt-stmt.h"