/* gcc/stmt.c (from official-gcc.git) */
1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
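/* Illustrative sketch (not part of this file): a front end expanding
   `if (cond) stmt;' would make calls roughly like

       expand_start_cond (cond, 0);
       ...expand the then-clause...
       expand_end_cond ();

   expand_start_cond and expand_end_cond are defined later in this file;
   the exact argument conventions are documented at their definitions.  */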
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "tm_p.h"
44 #include "flags.h"
45 #include "except.h"
46 #include "function.h"
47 #include "insn-config.h"
48 #include "expr.h"
49 #include "libfuncs.h"
50 #include "hard-reg-set.h"
51 #include "loop.h"
52 #include "recog.h"
53 #include "machmode.h"
54 #include "toplev.h"
55 #include "output.h"
56 #include "ggc.h"
57 #include "langhooks.h"
58 #include "predict.h"
59 #include "optabs.h"
60 #include "target.h"
61 #include "regs.h"
63 /* Functions and data structures for expanding case statements. */
65 /* Case label structure, used to hold info on labels within case
66 statements. We handle "range" labels; for a single-value label
67 as in C, the high and low limits are the same.
69 An AVL tree of case nodes is initially created, and later transformed
70 to a list linked via the RIGHT fields in the nodes. Nodes with
71 higher case values are later in the list.
73 Switch statements can be output in one of two forms. A branch table
74 is used if there are more than a few labels and the labels are dense
75 within the range between the smallest and largest case value. If a
76 branch table is used, no further manipulations are done with the case
77 node chain.
79 The alternative to the use of a branch table is to generate a series
80 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
81 and PARENT fields to hold a binary tree. Initially the tree is
82 totally unbalanced, with everything on the right. We balance the tree
83 with nodes on the left having lower case values than the parent
84 and nodes on the right having higher values. We then output the tree
85 in order. */
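/* For illustration (not from this file): a dense switch such as

       switch (c) { case 1: case 2: case 3: case 4: f (); }

   is a candidate for a branch table, while a sparse one such as

       switch (c) { case 1: f (); break; case 1000: g (); break; }

   would instead be emitted as the compare-and-jump tree described above.
   The heuristics making that choice live in the case-expansion code
   later in this file.  */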
87 struct case_node GTY(())
89 struct case_node *left; /* Left son in binary tree */
90 struct case_node *right; /* Right son in binary tree; also node chain */
91 struct case_node *parent; /* Parent of node in binary tree */
92 tree low; /* Lowest index value for this label */
93 tree high; /* Highest index value for this label */
94 tree code_label; /* Label to jump to when node matches */
95 int balance;
98 typedef struct case_node case_node;
99 typedef struct case_node *case_node_ptr;
101 /* These are used by estimate_case_costs and balance_case_nodes. */
103 /* This must be a signed type, and non-ANSI compilers lack signed char. */
104 static short cost_table_[129];
105 static int use_cost_table;
106 static int cost_table_initialized;
108 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
109 is unsigned. */
110 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
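/* Worked example of the offset above: COST_TABLE (-1) refers to
   cost_table_[0] and COST_TABLE (127) to cost_table_[128], so the whole
   index range -1 .. 127 fits in the 129-element array without ever
   using a negative subscript.  */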
112 /* Stack of control and binding constructs we are currently inside.
114 These constructs begin when you call `expand_start_WHATEVER'
115 and end when you call `expand_end_WHATEVER'. This stack records
116 info about how the construct began that tells the end-function
117 what to do. It also may provide information about the construct
118 to alter the behavior of other constructs within the body.
119 For example, they may affect the behavior of C `break' and `continue'.
121 Each construct gets one `struct nesting' object.
122 All of these objects are chained through the `all' field.
123 `nesting_stack' points to the first object (innermost construct).
124 The position of an entry on `nesting_stack' is in its `depth' field.
126 Each type of construct has its own individual stack.
127 For example, loops have `loop_stack'. Each object points to the
128 next object of the same type through the `next' field.
130 Some constructs are visible to `break' exit-statements and others
131 are not. Which constructs are visible depends on the language.
132 Therefore, the data structure allows each construct to be visible
133 or not, according to the args given when the construct is started.
134 The construct is visible if the `exit_label' field is non-null.
135 In that case, the value should be a CODE_LABEL rtx. */
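/* For illustration (not from this file): while expanding

       while (a)
         if (b)
           break;

   nesting_stack holds a LOOP_NESTING entry and, inside it, a
   COND_NESTING entry, both chained through the `all' field; loop_stack
   and cond_stack each hold only their own entry.  The `break' is
   resolved against the innermost construct whose exit_label is
   non-null, here the loop.  */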
137 struct nesting GTY(())
139 struct nesting *all;
140 struct nesting *next;
141 int depth;
142 rtx exit_label;
143 enum nesting_desc {
144 COND_NESTING,
145 LOOP_NESTING,
146 BLOCK_NESTING,
147 CASE_NESTING
148 } desc;
149 union nesting_u
151 /* For conds (if-then and if-then-else statements). */
152 struct nesting_cond
154 /* Label for the end of the if construct.
155 There is none if EXITFLAG was not set
156 and no `else' has been seen yet. */
157 rtx endif_label;
158 /* Label for the end of this alternative.
159 This may be the end of the if or the next else/elseif. */
160 rtx next_label;
161 } GTY ((tag ("COND_NESTING"))) cond;
162 /* For loops. */
163 struct nesting_loop
165 /* Label at the top of the loop; place to loop back to. */
166 rtx start_label;
167 /* Label at the end of the whole construct. */
168 rtx end_label;
169 /* Label for `continue' statement to jump to;
170 this is in front of the stepper of the loop. */
171 rtx continue_label;
172 } GTY ((tag ("LOOP_NESTING"))) loop;
173 /* For variable binding contours. */
174 struct nesting_block
176 /* Sequence number of this binding contour within the function,
177 in order of entry. */
178 int block_start_count;
179 /* Nonzero => value to restore stack to on exit. */
180 rtx stack_level;
181 /* The NOTE that starts this contour.
182 Used by expand_goto to check whether the destination
183 is within each contour or not. */
184 rtx first_insn;
185 /* Innermost containing binding contour that has a stack level. */
186 struct nesting *innermost_stack_block;
187 /* List of cleanups to be run on exit from this contour.
188 This is a list of expressions to be evaluated.
189 The TREE_PURPOSE of each link is the ..._DECL node
190 which the cleanup pertains to. */
191 tree cleanups;
192 /* List of cleanup-lists of blocks containing this block,
193 as they were at the locus where this block appears.
194 There is an element for each containing block,
195 ordered innermost containing block first.
196 The tail of this list can be 0,
197 if all remaining elements would be empty lists.
198 The element's TREE_VALUE is the cleanup-list of that block,
199 which may be null. */
200 tree outer_cleanups;
201 /* Chain of labels defined inside this binding contour.
202 For contours that have stack levels or cleanups. */
203 struct label_chain *label_chain;
204 /* Nonzero if this is associated with an EH region. */
205 int exception_region;
206 /* The saved target_temp_slot_level from our outer block.
207 We may reset target_temp_slot_level to be the level of
208 this block, if that is done, target_temp_slot_level
209 reverts to the saved target_temp_slot_level at the very
210 end of the block. */
211 int block_target_temp_slot_level;
212 /* True if we are currently emitting insns in an area of
213 output code that is controlled by a conditional
214 expression. This is used by the cleanup handling code to
215 generate conditional cleanup actions. */
216 int conditional_code;
217 /* A place to move the start of the exception region for any
218 of the conditional cleanups, must be at the end or after
219 the start of the last unconditional cleanup, and before any
220 conditional branch points. */
221 rtx last_unconditional_cleanup;
222 } GTY ((tag ("BLOCK_NESTING"))) block;
223 /* For switch (C) or case (Pascal) statements. */
224 struct nesting_case
226 /* The insn after which the case dispatch should finally
227 be emitted. Zero for a dummy. */
228 rtx start;
229 /* A list of case labels; it is first built as an AVL tree.
230 During expand_end_case, this is converted to a list, and may be
231 rearranged into a nearly balanced binary tree. */
232 struct case_node *case_list;
233 /* Label to jump to if no case matches. */
234 tree default_label;
235 /* The expression to be dispatched on. */
236 tree index_expr;
237 /* Type that INDEX_EXPR should be converted to. */
238 tree nominal_type;
239 /* Name of this kind of statement, for warnings. */
240 const char *printname;
241 /* Used to save no_line_numbers till we see the first case label.
242 We set this to -1 when we see the first case label in this
243 case statement. */
244 int line_number_status;
245 } GTY ((tag ("CASE_NESTING"))) case_stmt;
246 } GTY ((desc ("%1.desc"))) data;
249 /* Allocate and return a new `struct nesting'. */
251 #define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))
253 /* Pop the nesting stack element by element until we pop off
254 the element which is at the top of STACK.
255 Update all the other stacks, popping off elements from them
256 as we pop them from nesting_stack. */
258 #define POPSTACK(STACK) \
259 do { struct nesting *target = STACK; \
260 struct nesting *this; \
261 do { this = nesting_stack; \
262 if (loop_stack == this) \
263 loop_stack = loop_stack->next; \
264 if (cond_stack == this) \
265 cond_stack = cond_stack->next; \
266 if (block_stack == this) \
267 block_stack = block_stack->next; \
268 if (stack_block_stack == this) \
269 stack_block_stack = stack_block_stack->next; \
270 if (case_stack == this) \
271 case_stack = case_stack->next; \
272 nesting_depth = nesting_stack->depth - 1; \
273 nesting_stack = this->all; } \
274 while (this != target); } while (0)
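/* Usage sketch (not part of this file): POPSTACK (loop_stack) pops
   nesting_stack entry by entry, unlinking each entry from whichever
   per-kind stack it heads, and stops once the entry loop_stack pointed
   to has itself been popped.  */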
276 /* In some cases it is impossible to generate code for a forward goto
277 until the label definition is seen. This happens when it may be necessary
278 for the goto to reset the stack pointer: we don't yet know how to do that.
279 So expand_goto puts an entry on this fixup list.
280 Each time a binding contour that resets the stack is exited,
281 we check each fixup.
282 If the target label has now been defined, we can insert the proper code. */
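/* Illustrative example (not from this file): for C source like

       {
         int buf[n];      (a variable-sized object, so this block
                           saves and restores a stack level)
         ...
         goto out;        (the label is not yet defined at this point)
       }
     out: ...

   the stack level to restore is unknown when the goto is expanded, so
   expand_goto records a fixup here, and when the block is exited
   fixup_gotos inserts the proper stack-restore (and cleanup) insns just
   before the jump.  */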
284 struct goto_fixup GTY(())
286 /* Points to following fixup. */
287 struct goto_fixup *next;
288 /* Points to the insn before the jump insn.
289 If more code must be inserted, it goes after this insn. */
290 rtx before_jump;
291 /* The LABEL_DECL that this jump is jumping to, or 0
292 for break, continue or return. */
293 tree target;
294 /* The BLOCK for the place where this goto was found. */
295 tree context;
296 /* The CODE_LABEL rtx that this is jumping to. */
297 rtx target_rtl;
298 /* Number of binding contours started in current function
299 before the label reference. */
300 int block_start_count;
301 /* The outermost stack level that should be restored for this jump.
302 Each time a binding contour that resets the stack is exited,
303 if the target label is *not* yet defined, this slot is updated. */
304 rtx stack_level;
305 /* List of lists of cleanup expressions to be run by this goto.
306 There is one element for each block that this goto is within.
307 The tail of this list can be 0,
308 if all remaining elements would be empty.
309 The TREE_VALUE contains the cleanup list of that block as of the
310 time this goto was seen.
311 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
312 tree cleanup_list_list;
315 /* Within any binding contour that must restore a stack level,
316 all labels are recorded with a chain of these structures. */
318 struct label_chain GTY(())
320 /* Points to following fixup. */
321 struct label_chain *next;
322 tree label;
325 struct stmt_status GTY(())
327 /* Chain of all pending binding contours. */
328 struct nesting * x_block_stack;
330 /* If any new stacks are added here, add them to POPSTACKS too. */
332 /* Chain of all pending binding contours that restore stack levels
333 or have cleanups. */
334 struct nesting * x_stack_block_stack;
336 /* Chain of all pending conditional statements. */
337 struct nesting * x_cond_stack;
339 /* Chain of all pending loops. */
340 struct nesting * x_loop_stack;
342 /* Chain of all pending case or switch statements. */
343 struct nesting * x_case_stack;
345 /* Separate chain including all of the above,
346 chained through the `all' field. */
347 struct nesting * x_nesting_stack;
349 /* Number of entries on nesting_stack now. */
350 int x_nesting_depth;
352 /* Number of binding contours started so far in this function. */
353 int x_block_start_count;
355 /* Each time we expand an expression-statement,
356 record the expr's type and its RTL value here. */
357 tree x_last_expr_type;
358 rtx x_last_expr_value;
359 rtx x_last_expr_alt_rtl;
361 /* Nonzero if within a ({...}) grouping, in which case we must
362 always compute a value for each expr-stmt in case it is the last one. */
363 int x_expr_stmts_for_value;
365 /* Location of last line-number note, whether we actually
366 emitted it or not. */
367 location_t x_emit_locus;
369 struct goto_fixup *x_goto_fixup_chain;
372 #define block_stack (cfun->stmt->x_block_stack)
373 #define stack_block_stack (cfun->stmt->x_stack_block_stack)
374 #define cond_stack (cfun->stmt->x_cond_stack)
375 #define loop_stack (cfun->stmt->x_loop_stack)
376 #define case_stack (cfun->stmt->x_case_stack)
377 #define nesting_stack (cfun->stmt->x_nesting_stack)
378 #define nesting_depth (cfun->stmt->x_nesting_depth)
379 #define current_block_start_count (cfun->stmt->x_block_start_count)
380 #define last_expr_type (cfun->stmt->x_last_expr_type)
381 #define last_expr_value (cfun->stmt->x_last_expr_value)
382 #define last_expr_alt_rtl (cfun->stmt->x_last_expr_alt_rtl)
383 #define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
384 #define emit_locus (cfun->stmt->x_emit_locus)
385 #define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
387 /* Nonzero if we are using EH to handle cleanups. */
388 int using_eh_for_cleanups_p = 0;
390 static int n_occurrences (int, const char *);
391 static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
392 static void expand_goto_internal (tree, rtx, rtx);
393 static int expand_fixup (tree, rtx, rtx);
394 static void expand_nl_goto_receiver (void);
395 static void fixup_gotos (struct nesting *, rtx, tree, rtx, int);
396 static bool check_operand_nalternatives (tree, tree);
397 static bool check_unique_operand_names (tree, tree);
398 static char *resolve_operand_name_1 (char *, tree, tree);
399 static void expand_null_return_1 (rtx);
400 static enum br_predictor return_prediction (rtx);
401 static rtx shift_return_value (rtx);
402 static void expand_value_return (rtx);
403 static int tail_recursion_args (tree, tree);
404 static void expand_cleanups (tree, int, int);
405 static void check_seenlabel (void);
406 static void do_jump_if_equal (rtx, rtx, rtx, int);
407 static int estimate_case_costs (case_node_ptr);
408 static bool same_case_target_p (rtx, rtx);
409 static void strip_default_case_nodes (case_node_ptr *, rtx);
410 static bool lshift_cheap_p (void);
411 static int case_bit_test_cmp (const void *, const void *);
412 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
413 static void group_case_nodes (case_node_ptr);
414 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
415 static int node_has_low_bound (case_node_ptr, tree);
416 static int node_has_high_bound (case_node_ptr, tree);
417 static int node_is_bounded (case_node_ptr, tree);
418 static void emit_jump_if_reachable (rtx);
419 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
420 static struct case_node *case_tree2list (case_node *, case_node *);
422 void
423 using_eh_for_cleanups (void)
425 using_eh_for_cleanups_p = 1;
428 void
429 init_stmt_for_function (void)
431 cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
434 /* Record the current file and line. Called from emit_line_note. */
436 void
437 set_file_and_line_for_stmt (location_t location)
439 /* If we're outputting an inline function, and we add a line note,
440 there may be no CFUN->STMT information. So, there's no need to
441 update it. */
442 if (cfun->stmt)
443 emit_locus = location;
446 /* Emit a no-op instruction. */
448 void
449 emit_nop (void)
451 rtx last_insn;
453 last_insn = get_last_insn ();
454 if (!optimize
455 && (GET_CODE (last_insn) == CODE_LABEL
456 || (GET_CODE (last_insn) == NOTE
457 && prev_real_insn (last_insn) == 0)))
458 emit_insn (gen_nop ());
461 /* Return the rtx-label that corresponds to a LABEL_DECL,
462 creating it if necessary. */
465 label_rtx (tree label)
467 if (TREE_CODE (label) != LABEL_DECL)
468 abort ();
470 if (!DECL_RTL_SET_P (label))
472 rtx r = gen_label_rtx ();
473 SET_DECL_RTL (label, r);
474 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
475 LABEL_PRESERVE_P (r) = 1;
478 return DECL_RTL (label);
481 /* As above, but also put it on the forced-reference list of the
482 function that contains it. */
484 force_label_rtx (tree label)
486 rtx ref = label_rtx (label);
487 tree function = decl_function_context (label);
488 struct function *p;
490 if (!function)
491 abort ();
493 if (function != current_function_decl)
494 p = find_function_data (function);
495 else
496 p = cfun;
498 p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
499 p->expr->x_forced_labels);
500 return ref;
503 /* Add an unconditional jump to LABEL as the next sequential instruction. */
505 void
506 emit_jump (rtx label)
508 do_pending_stack_adjust ();
509 emit_jump_insn (gen_jump (label));
510 emit_barrier ();
513 /* Emit code to jump to the address
514 specified by the pointer expression EXP. */
516 void
517 expand_computed_goto (tree exp)
519 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
521 x = convert_memory_address (Pmode, x);
523 emit_queue ();
525 if (! cfun->computed_goto_common_label)
527 cfun->computed_goto_common_reg = copy_to_mode_reg (Pmode, x);
528 cfun->computed_goto_common_label = gen_label_rtx ();
530 do_pending_stack_adjust ();
531 emit_label (cfun->computed_goto_common_label);
532 emit_indirect_jump (cfun->computed_goto_common_reg);
534 current_function_has_computed_jump = 1;
536 else
538 emit_move_insn (cfun->computed_goto_common_reg, x);
539 emit_jump (cfun->computed_goto_common_label);
543 /* Handle goto statements and the labels that they can go to. */
545 /* Specify the location in the RTL code of a label LABEL,
546 which is a LABEL_DECL tree node.
548 This is used for the kind of label that the user can jump to with a
549 goto statement, and for alternatives of a switch or case statement.
550 RTL labels generated for loops and conditionals don't go through here;
551 they are generated directly at the RTL level, by other functions below.
553 Note that this has nothing to do with defining label *names*.
554 Languages vary in how they do that and what that even means. */
556 void
557 expand_label (tree label)
559 struct label_chain *p;
560 rtx label_r = label_rtx (label);
562 do_pending_stack_adjust ();
563 emit_label (label_r);
564 if (DECL_NAME (label))
565 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
567 if (DECL_NONLOCAL (label))
569 expand_nl_goto_receiver ();
570 nonlocal_goto_handler_labels
571 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
572 nonlocal_goto_handler_labels);
575 if (FORCED_LABEL (label))
576 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
578 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
579 maybe_set_first_label_num (label_r);
581 if (stack_block_stack != 0)
583 p = ggc_alloc (sizeof (struct label_chain));
584 p->next = stack_block_stack->data.block.label_chain;
585 stack_block_stack->data.block.label_chain = p;
586 p->label = label;
590 /* Generate RTL code for a `goto' statement with target label LABEL.
591 LABEL should be a LABEL_DECL tree node that was or will later be
592 defined with `expand_label'. */
594 void
595 expand_goto (tree label)
597 #ifdef ENABLE_CHECKING
598 /* Check for a nonlocal goto to a containing function. Should have
599 gotten translated to __builtin_nonlocal_goto. */
600 tree context = decl_function_context (label);
601 if (context != 0 && context != current_function_decl)
602 abort ();
603 #endif
605 expand_goto_internal (label, label_rtx (label), NULL_RTX);
608 /* Generate RTL code for a `goto' statement with target label BODY.
609 LABEL should be a LABEL_REF.
610 LAST_INSN, if non-0, is the rtx we should consider as the last
611 insn emitted (for the purposes of cleaning up a return). */
613 static void
614 expand_goto_internal (tree body, rtx label, rtx last_insn)
616 struct nesting *block;
617 rtx stack_level = 0;
619 if (GET_CODE (label) != CODE_LABEL)
620 abort ();
622 /* If label has already been defined, we can tell now
623 whether and how we must alter the stack level. */
625 if (PREV_INSN (label) != 0)
627 /* Find the innermost pending block that contains the label.
628 (Check containment by comparing insn-uids.)
629 Then restore the outermost stack level within that block,
630 and do cleanups of all blocks contained in it. */
631 for (block = block_stack; block; block = block->next)
633 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
634 break;
635 if (block->data.block.stack_level != 0)
636 stack_level = block->data.block.stack_level;
637 /* Execute the cleanups for blocks we are exiting. */
638 if (block->data.block.cleanups != 0)
640 expand_cleanups (block->data.block.cleanups, 1, 1);
641 do_pending_stack_adjust ();
645 if (stack_level)
647 /* Ensure stack adjust isn't done by emit_jump, as this
648 would clobber the stack pointer. This one should be
649 deleted as dead by flow. */
650 clear_pending_stack_adjust ();
651 do_pending_stack_adjust ();
653 /* Don't do this adjust if it's to the end label and this function
654 is to return with a depressed stack pointer. */
655 if (label == return_label
656 && (((TREE_CODE (TREE_TYPE (current_function_decl))
657 == FUNCTION_TYPE)
658 && (TYPE_RETURNS_STACK_DEPRESSED
659 (TREE_TYPE (current_function_decl))))))
661 else
662 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
665 if (body != 0 && DECL_TOO_LATE (body))
666 error ("jump to `%s' invalidly jumps into binding contour",
667 IDENTIFIER_POINTER (DECL_NAME (body)));
669 /* Label not yet defined: may need to put this goto
670 on the fixup list. */
671 else if (! expand_fixup (body, label, last_insn))
673 /* No fixup needed. Record that the label is the target
674 of at least one goto that has no fixup. */
675 if (body != 0)
676 TREE_ADDRESSABLE (body) = 1;
679 emit_jump (label);
682 /* Generate if necessary a fixup for a goto
683 whose target label in tree structure (if any) is TREE_LABEL
684 and whose target in rtl is RTL_LABEL.
686 If LAST_INSN is nonzero, we pretend that the jump appears
687 after insn LAST_INSN instead of at the current point in the insn stream.
689 The fixup will be used later to insert insns just before the goto.
690 Those insns will restore the stack level as appropriate for the
691 target label, and will (in the case of C++) also invoke any object
692 destructors which have to be invoked when we exit the scopes which
693 are exited by the goto.
695 Value is nonzero if a fixup is made. */
697 static int
698 expand_fixup (tree tree_label, rtx rtl_label, rtx last_insn)
700 struct nesting *block, *end_block;
702 /* See if we can recognize which block the label will be output in.
703 This is possible in some very common cases.
704 If we succeed, set END_BLOCK to that block.
705 Otherwise, set it to 0. */
707 if (cond_stack
708 && (rtl_label == cond_stack->data.cond.endif_label
709 || rtl_label == cond_stack->data.cond.next_label))
710 end_block = cond_stack;
711 /* If we are in a loop, recognize certain labels which
712 are likely targets. This reduces the number of fixups
713 we need to create. */
714 else if (loop_stack
715 && (rtl_label == loop_stack->data.loop.start_label
716 || rtl_label == loop_stack->data.loop.end_label
717 || rtl_label == loop_stack->data.loop.continue_label))
718 end_block = loop_stack;
719 else
720 end_block = 0;
722 /* Now set END_BLOCK to the binding level to which we will return. */
724 if (end_block)
726 struct nesting *next_block = end_block->all;
727 block = block_stack;
729 /* First see if the END_BLOCK is inside the innermost binding level.
730 If so, then no cleanups or stack levels are relevant. */
731 while (next_block && next_block != block)
732 next_block = next_block->all;
734 if (next_block)
735 return 0;
737 /* Otherwise, set END_BLOCK to the innermost binding level
738 which is outside the relevant control-structure nesting. */
739 next_block = block_stack->next;
740 for (block = block_stack; block != end_block; block = block->all)
741 if (block == next_block)
742 next_block = next_block->next;
743 end_block = next_block;
746 /* Does any containing block have a stack level or cleanups?
747 If not, no fixup is needed, and that is the normal case
748 (the only case, for standard C). */
749 for (block = block_stack; block != end_block; block = block->next)
750 if (block->data.block.stack_level != 0
751 || block->data.block.cleanups != 0)
752 break;
754 if (block != end_block)
756 /* Ok, a fixup is needed. Add a fixup to the list of such. */
757 struct goto_fixup *fixup = ggc_alloc (sizeof (struct goto_fixup));
758 /* In case an old stack level is restored, make sure that comes
759 after any pending stack adjust. */
760 /* ?? If the fixup isn't to come at the present position,
761 doing the stack adjust here isn't useful. Doing it with our
762 settings at that location isn't useful either. Let's hope
763 someone does it! */
764 if (last_insn == 0)
765 do_pending_stack_adjust ();
766 fixup->target = tree_label;
767 fixup->target_rtl = rtl_label;
769 /* Create a BLOCK node and a corresponding matched set of
770 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
771 this point. The notes will encapsulate any and all fixup
772 code which we might later insert at this point in the insn
773 stream. Also, the BLOCK node will be the parent (i.e. the
774 `SUPERBLOCK') of any other BLOCK nodes which we might create
775 later on when we are expanding the fixup code.
777 Note that optimization passes (including expand_end_loop)
778 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
779 as a placeholder. */
782 rtx original_before_jump
783 = last_insn ? last_insn : get_last_insn ();
784 rtx start;
785 rtx end;
786 tree block;
788 block = make_node (BLOCK);
789 TREE_USED (block) = 1;
791 if (!cfun->x_whole_function_mode_p)
792 lang_hooks.decls.insert_block (block);
793 else
795 BLOCK_CHAIN (block)
796 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
797 BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
798 = block;
801 start_sequence ();
802 start = emit_note (NOTE_INSN_BLOCK_BEG);
803 if (cfun->x_whole_function_mode_p)
804 NOTE_BLOCK (start) = block;
805 fixup->before_jump = emit_note (NOTE_INSN_DELETED);
806 end = emit_note (NOTE_INSN_BLOCK_END);
807 if (cfun->x_whole_function_mode_p)
808 NOTE_BLOCK (end) = block;
809 fixup->context = block;
810 end_sequence ();
811 emit_insn_after (start, original_before_jump);
814 fixup->block_start_count = current_block_start_count;
815 fixup->stack_level = 0;
816 fixup->cleanup_list_list
817 = ((block->data.block.outer_cleanups
818 || block->data.block.cleanups)
819 ? tree_cons (NULL_TREE, block->data.block.cleanups,
820 block->data.block.outer_cleanups)
821 : 0);
822 fixup->next = goto_fixup_chain;
823 goto_fixup_chain = fixup;
826 return block != 0;
 829 /* Expand any needed fixups in the outermost binding level of the
830 function. FIRST_INSN is the first insn in the function. */
832 void
833 expand_fixups (rtx first_insn)
835 fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
838 /* When exiting a binding contour, process all pending gotos requiring fixups.
839 THISBLOCK is the structure that describes the block being exited.
840 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
841 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
842 FIRST_INSN is the insn that began this contour.
844 Gotos that jump out of this contour must restore the
845 stack level and do the cleanups before actually jumping.
847 DONT_JUMP_IN positive means report error if there is a jump into this
848 contour from before the beginning of the contour. This is also done if
849 STACK_LEVEL is nonzero unless DONT_JUMP_IN is negative. */
851 static void
852 fixup_gotos (struct nesting *thisblock, rtx stack_level,
853 tree cleanup_list, rtx first_insn, int dont_jump_in)
855 struct goto_fixup *f, *prev;
857 /* F is the fixup we are considering; PREV is the previous one. */
858 /* We run this loop in two passes so that cleanups of exited blocks
859 are run first, and blocks that are exited are marked so
860 afterwards. */
862 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
864 /* Test for a fixup that is inactive because it is already handled. */
865 if (f->before_jump == 0)
867 /* Delete inactive fixup from the chain, if that is easy to do. */
868 if (prev != 0)
869 prev->next = f->next;
871 /* Has this fixup's target label been defined?
872 If so, we can finalize it. */
873 else if (PREV_INSN (f->target_rtl) != 0)
875 rtx cleanup_insns;
877 /* If this fixup jumped into this contour from before the beginning
878 of this contour, report an error. This code used to use
879 the first non-label insn after f->target_rtl, but that's
880 wrong since such can be added, by things like put_var_into_stack
881 and have INSN_UIDs that are out of the range of the block. */
882 /* ??? Bug: this does not detect jumping in through intermediate
883 blocks that have stack levels or cleanups.
884 It detects only a problem with the innermost block
885 around the label. */
886 if (f->target != 0
887 && (dont_jump_in > 0 || (dont_jump_in == 0 && stack_level)
888 || cleanup_list)
889 && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
890 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
891 && ! DECL_ERROR_ISSUED (f->target))
893 error ("%Jlabel '%D' used before containing binding contour",
894 f->target, f->target);
895 /* Prevent multiple errors for one label. */
896 DECL_ERROR_ISSUED (f->target) = 1;
899 /* We will expand the cleanups into a sequence of their own and
900 then later on we will attach this new sequence to the insn
901 stream just ahead of the actual jump insn. */
903 start_sequence ();
905 /* Temporarily restore the lexical context where we will
906 logically be inserting the fixup code. We do this for the
907 sake of getting the debugging information right. */
909 lang_hooks.decls.pushlevel (0);
910 lang_hooks.decls.set_block (f->context);
912 /* Expand the cleanups for blocks this jump exits. */
913 if (f->cleanup_list_list)
915 tree lists;
916 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
917 /* Marked elements correspond to blocks that have been closed.
918 Do their cleanups. */
919 if (TREE_ADDRESSABLE (lists)
920 && TREE_VALUE (lists) != 0)
922 expand_cleanups (TREE_VALUE (lists), 1, 1);
923 /* Pop any pushes done in the cleanups,
924 in case function is about to return. */
925 do_pending_stack_adjust ();
929 /* Restore stack level for the biggest contour that this
930 jump jumps out of. */
931 if (f->stack_level
932 && ! (f->target_rtl == return_label
933 && ((TREE_CODE (TREE_TYPE (current_function_decl))
934 == FUNCTION_TYPE)
935 && (TYPE_RETURNS_STACK_DEPRESSED
936 (TREE_TYPE (current_function_decl))))))
937 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
939 /* Finish up the sequence containing the insns which implement the
940 necessary cleanups, and then attach that whole sequence to the
941 insn stream just ahead of the actual jump insn. Attaching it
 942 at that point ensures that any cleanups which are in fact
943 implicit C++ object destructions (which must be executed upon
944 leaving the block) appear (to the debugger) to be taking place
945 in an area of the generated code where the object(s) being
946 destructed are still "in scope". */
948 cleanup_insns = get_insns ();
949 lang_hooks.decls.poplevel (1, 0, 0);
951 end_sequence ();
952 emit_insn_after (cleanup_insns, f->before_jump);
954 f->before_jump = 0;
958 /* For any still-undefined labels, do the cleanups for this block now.
959 We must do this now since items in the cleanup list may go out
960 of scope when the block ends. */
961 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
962 if (f->before_jump != 0
963 && PREV_INSN (f->target_rtl) == 0
964 /* Label has still not appeared. If we are exiting a block with
965 a stack level to restore, that started before the fixup,
966 mark this stack level as needing restoration
967 when the fixup is later finalized. */
968 && thisblock != 0
969 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
970 means the label is undefined. That's erroneous, but possible. */
971 && (thisblock->data.block.block_start_count
972 <= f->block_start_count))
974 tree lists = f->cleanup_list_list;
975 rtx cleanup_insns;
977 for (; lists; lists = TREE_CHAIN (lists))
978 /* If the following elt. corresponds to our containing block
979 then the elt. must be for this block. */
980 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
982 start_sequence ();
983 lang_hooks.decls.pushlevel (0);
984 lang_hooks.decls.set_block (f->context);
985 expand_cleanups (TREE_VALUE (lists), 1, 1);
986 do_pending_stack_adjust ();
987 cleanup_insns = get_insns ();
988 lang_hooks.decls.poplevel (1, 0, 0);
989 end_sequence ();
990 if (cleanup_insns != 0)
991 f->before_jump
992 = emit_insn_after (cleanup_insns, f->before_jump);
994 f->cleanup_list_list = TREE_CHAIN (lists);
997 if (stack_level)
998 f->stack_level = stack_level;
1002 /* Return the number of times character C occurs in string S. */
1003 static int
1004 n_occurrences (int c, const char *s)
1006 int n = 0;
1007 while (*s)
1008 n += (*s++ == c);
1009 return n;
1012 /* Generate RTL for an asm statement (explicit assembler code).
1013 STRING is a STRING_CST node containing the assembler code text,
1014 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
1015 insn is volatile; don't optimize it. */
1017 void
1018 expand_asm (tree string, int vol)
1020 rtx body;
1022 if (TREE_CODE (string) == ADDR_EXPR)
1023 string = TREE_OPERAND (string, 0);
1025 body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));
1027 MEM_VOLATILE_P (body) = vol;
1029 emit_insn (body);
1031 clear_last_expr ();
1034 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
1035 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
1036 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
1037 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
1038 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
1039 constraint allows the use of a register operand. And, *IS_INOUT
1040 will be true if the operand is read-write, i.e., if it is used as
1041 an input as well as an output. If *CONSTRAINT_P is not in
1042 canonical form, it will be made canonical. (Note that `+' will be
1043 replaced with `=' as part of this process.)
1045 Returns TRUE if all went well; FALSE if an error occurred. */
1047 bool
1048 parse_output_constraint (const char **constraint_p, int operand_num,
1049 int ninputs, int noutputs, bool *allows_mem,
1050 bool *allows_reg, bool *is_inout)
1052 const char *constraint = *constraint_p;
1053 const char *p;
1055 /* Assume the constraint doesn't allow the use of either a register
1056 or memory. */
1057 *allows_mem = false;
1058 *allows_reg = false;
1060 /* Allow the `=' or `+' to not be at the beginning of the string,
1061 since it wasn't explicitly documented that way, and there is a
1062 large body of code that puts it last. Swap the character to
1063 the front, so as not to uglify any place else. */
1064 p = strchr (constraint, '=');
1065 if (!p)
1066 p = strchr (constraint, '+');
1068 /* If the string doesn't contain an `=', issue an error
1069 message. */
1070 if (!p)
1072 error ("output operand constraint lacks `='");
1073 return false;
1076 /* If the constraint begins with `+', then the operand is both read
1077 from and written to. */
1078 *is_inout = (*p == '+');
1080 /* Canonicalize the output constraint so that it begins with `='. */
1081 if (p != constraint || is_inout)
1083 char *buf;
1084 size_t c_len = strlen (constraint);
1086 if (p != constraint)
1087 warning ("output constraint `%c' for operand %d is not at the beginning",
1088 *p, operand_num);
1090 /* Make a copy of the constraint. */
1091 buf = alloca (c_len + 1);
1092 strcpy (buf, constraint);
1093 /* Swap the first character and the `=' or `+'. */
1094 buf[p - constraint] = buf[0];
1095 /* Make sure the first character is an `='. (Until we do this,
1096 it might be a `+'.) */
1097 buf[0] = '=';
1098 /* Replace the constraint with the canonicalized string. */
1099 *constraint_p = ggc_alloc_string (buf, c_len);
1100 constraint = *constraint_p;
1103 /* Loop through the constraint string. */
1104 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
1105 switch (*p)
1107 case '+':
1108 case '=':
1109 error ("operand constraint contains incorrectly positioned '+' or '='");
1110 return false;
1112 case '%':
1113 if (operand_num + 1 == ninputs + noutputs)
1115 error ("`%%' constraint used with last operand");
1116 return false;
1118 break;
1120 case 'V': case 'm': case 'o':
1121 *allows_mem = true;
1122 break;
1124 case '?': case '!': case '*': case '&': case '#':
1125 case 'E': case 'F': case 'G': case 'H':
1126 case 's': case 'i': case 'n':
1127 case 'I': case 'J': case 'K': case 'L': case 'M':
1128 case 'N': case 'O': case 'P': case ',':
1129 break;
1131 case '0': case '1': case '2': case '3': case '4':
1132 case '5': case '6': case '7': case '8': case '9':
1133 case '[':
1134 error ("matching constraint not valid in output operand");
1135 return false;
1137 case '<': case '>':
1138 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1139 excepting those that expand_call created. So match memory
1140 and hope. */
1141 *allows_mem = true;
1142 break;
1144 case 'g': case 'X':
1145 *allows_reg = true;
1146 *allows_mem = true;
1147 break;
1149 case 'p': case 'r':
1150 *allows_reg = true;
1151 break;
1153 default:
1154 if (!ISALPHA (*p))
1155 break;
1156 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
1157 *allows_reg = true;
1158 #ifdef EXTRA_CONSTRAINT_STR
1159 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
1160 *allows_reg = true;
1161 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
1162 *allows_mem = true;
1163 else
1165 /* Otherwise we can't assume anything about the nature of
1166 the constraint except that it isn't purely registers.
1167 Treat it like "g" and hope for the best. */
1168 *allows_reg = true;
1169 *allows_mem = true;
1171 #endif
1172 break;
1175 return true;
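/* Usage sketch (not part of this file): for an output operand written
   as "+r" (x), the code above sets *is_inout and *allows_reg and
   rewrites the recorded constraint to "=r"; a constraint written "r="
   is accepted with a warning and likewise canonicalized to "=r"; a
   plain "=m" sets only *allows_mem.  */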
1178 /* Similar, but for input constraints. */
1180 bool
1181 parse_input_constraint (const char **constraint_p, int input_num,
1182 int ninputs, int noutputs, int ninout,
1183 const char * const * constraints,
1184 bool *allows_mem, bool *allows_reg)
1186 const char *constraint = *constraint_p;
1187 const char *orig_constraint = constraint;
1188 size_t c_len = strlen (constraint);
1189 size_t j;
1190 bool saw_match = false;
1192 /* Assume the constraint doesn't allow the use of either
1193 a register or memory. */
1194 *allows_mem = false;
1195 *allows_reg = false;
1197 /* Make sure constraint has neither `=', `+', nor '&'. */
1199 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
1200 switch (constraint[j])
1202 case '+': case '=': case '&':
1203 if (constraint == orig_constraint)
1205 error ("input operand constraint contains `%c'", constraint[j]);
1206 return false;
1208 break;
1210 case '%':
1211 if (constraint == orig_constraint
1212 && input_num + 1 == ninputs - ninout)
1214 error ("`%%' constraint used with last operand");
1215 return false;
1217 break;
1219 case 'V': case 'm': case 'o':
1220 *allows_mem = true;
1221 break;
1223 case '<': case '>':
1224 case '?': case '!': case '*': case '#':
1225 case 'E': case 'F': case 'G': case 'H':
1226 case 's': case 'i': case 'n':
1227 case 'I': case 'J': case 'K': case 'L': case 'M':
1228 case 'N': case 'O': case 'P': case ',':
1229 break;
1231 /* Whether or not a numeric constraint allows a register is
1232 decided by the matching constraint, and so there is no need
1233 to do anything special with them. We must handle them in
1234 the default case, so that we don't unnecessarily force
1235 operands to memory. */
1236 case '0': case '1': case '2': case '3': case '4':
1237 case '5': case '6': case '7': case '8': case '9':
1239 char *end;
1240 unsigned long match;
1242 saw_match = true;
1244 match = strtoul (constraint + j, &end, 10);
1245 if (match >= (unsigned long) noutputs)
1247 error ("matching constraint references invalid operand number");
1248 return false;
1251 /* Try and find the real constraint for this dup. Only do this
1252 if the matching constraint is the only alternative. */
1253 if (*end == '\0'
1254 && (j == 0 || (j == 1 && constraint[0] == '%')))
1256 constraint = constraints[match];
1257 *constraint_p = constraint;
1258 c_len = strlen (constraint);
1259 j = 0;
1260 /* ??? At the end of the loop, we will skip the first part of
1261 the matched constraint. This assumes not only that the
1262 other constraint is an output constraint, but also that
1263 the '=' or '+' come first. */
1264 break;
1266 else
1267 j = end - constraint;
1268 /* Anticipate increment at end of loop. */
1269 j--;
1271 /* Fall through. */
1273 case 'p': case 'r':
1274 *allows_reg = true;
1275 break;
1277 case 'g': case 'X':
1278 *allows_reg = true;
1279 *allows_mem = true;
1280 break;
1282 default:
1283 if (! ISALPHA (constraint[j]))
1285 error ("invalid punctuation `%c' in constraint", constraint[j]);
1286 return false;
1288 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
1289 != NO_REGS)
1290 *allows_reg = true;
1291 #ifdef EXTRA_CONSTRAINT_STR
1292 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
1293 *allows_reg = true;
1294 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
1295 *allows_mem = true;
1296 else
1298 /* Otherwise we can't assume anything about the nature of
1299 the constraint except that it isn't purely registers.
1300 Treat it like "g" and hope for the best. */
1301 *allows_reg = true;
1302 *allows_mem = true;
1304 #endif
1305 break;
1308 if (saw_match && !*allows_reg)
1309 warning ("matching constraint does not allow a register");
1311 return true;
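/* Usage sketch (not part of this file): for
       asm ("..." : "=r" (x) : "0" (y));
   the input constraint "0" is a matching constraint; the code above
   substitutes the corresponding output constraint ("=r"), skips its
   leading `=', and so ends up setting *allows_reg for the input too.  */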
1314 /* INPUT is one of the input operands from EXPR, an ASM_EXPR. Returns true
1315 if it is an operand which must be passed in memory (i.e. an "m"
1316 constraint), false otherwise. */
1318 bool
1319 asm_op_is_mem_input (tree input, tree expr)
1321 const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (input)));
1322 tree outputs = ASM_OUTPUTS (expr);
1323 int noutputs = list_length (outputs);
1324 const char **constraints
1325 = (const char **) alloca ((noutputs) * sizeof (const char *));
1326 int i = 0;
1327 bool allows_mem, allows_reg;
1328 tree t;
1330 /* Collect output constraints. */
1331 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
1332 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1334 /* We pass 0 for input_num, ninputs and ninout; they are only used for
1335 error checking which will be done at expand time. */
1336 parse_input_constraint (&constraint, 0, 0, noutputs, 0, constraints,
1337 &allows_mem, &allows_reg);
1338 return (!allows_reg && allows_mem);
1341 /* Check for overlap between registers marked in CLOBBERED_REGS and
1342 anything inappropriate in DECL. Emit error and return TRUE for error,
1343 FALSE for ok. */
1345 static bool
1346 decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
1348 /* Conflicts between asm-declared register variables and the clobber
1349 list are not allowed. */
1350 if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
1351 && DECL_REGISTER (decl)
1352 && REG_P (DECL_RTL (decl))
1353 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
1355 rtx reg = DECL_RTL (decl);
1356 unsigned int regno;
1358 for (regno = REGNO (reg);
1359 regno < (REGNO (reg)
1360 + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
1361 regno++)
1362 if (TEST_HARD_REG_BIT (clobbered_regs, regno))
1364 error ("asm-specifier for variable `%s' conflicts with asm clobber list",
1365 IDENTIFIER_POINTER (DECL_NAME (decl)));
1367 /* Reset registerness to stop multiple errors emitted for a
1368 single variable. */
1369 DECL_REGISTER (decl) = 0;
1370 return true;
1373 return false;
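/* Illustrative case (not from this file): given
       register int counter asm ("ebx");
   on i386, an asm statement that uses `counter' as an operand and also
   lists "ebx" among its clobbers would trigger the error above, since
   executing the asm would destroy the register variable.  */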
1376 /* Generate RTL for an asm statement with arguments.
1377 STRING is the instruction template.
1378 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1379 Each output or input has an expression in the TREE_VALUE
1380 and a tree list in TREE_PURPOSE which in turn contains a constraint
1381 name in TREE_VALUE (or NULL_TREE) and a constraint string
1382 in TREE_PURPOSE.
1383 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1384 that is clobbered by this insn.
1386 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1387 Some elements of OUTPUTS may be replaced with trees representing temporary
1388 values. The caller should copy those temporary values to the originally
1389 specified lvalues.
1391 VOL nonzero means the insn is volatile; don't optimize it. */
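/* Illustrative mapping (not from this file): for the GNU C statement

       asm volatile ("insn %0, %1" : "=r" (dst) : "r" (src) : "memory");

   STRING is the template "insn %0, %1", OUTPUTS holds the "=r" (dst)
   operand, INPUTS holds "r" (src), CLOBBERS is a list containing the
   STRING_CST "memory", and VOL is nonzero because of `volatile'.
   ("insn" is just a placeholder mnemonic for the example.)  */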
1393 void
1394 expand_asm_operands (tree string, tree outputs, tree inputs,
1395 tree clobbers, int vol, location_t locus)
1397 rtvec argvec, constraintvec;
1398 rtx body;
1399 int ninputs = list_length (inputs);
1400 int noutputs = list_length (outputs);
1401 int ninout;
1402 int nclobbers;
1403 HARD_REG_SET clobbered_regs;
1404 int clobber_conflict_found = 0;
1405 tree tail;
1406 tree t;
1407 int i;
1408 /* Vector of RTX's of evaluated output operands. */
1409 rtx *output_rtx = alloca (noutputs * sizeof (rtx));
1410 int *inout_opnum = alloca (noutputs * sizeof (int));
1411 rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
1412 enum machine_mode *inout_mode
1413 = alloca (noutputs * sizeof (enum machine_mode));
1414 const char **constraints
1415 = alloca ((noutputs + ninputs) * sizeof (const char *));
1416 int old_generating_concat_p = generating_concat_p;
1418 /* An ASM with no outputs needs to be treated as volatile, for now. */
1419 if (noutputs == 0)
1420 vol = 1;
1422 if (! check_operand_nalternatives (outputs, inputs))
1423 return;
1425 string = resolve_asm_operand_names (string, outputs, inputs);
1427 /* Collect constraints. */
1428 i = 0;
1429 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
1430 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1431 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
1432 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1434 /* Sometimes we wish to automatically clobber registers across an asm.
1435 Case in point is when the i386 backend moved from cc0 to a hard reg --
1436 maintaining source-level compatibility means automatically clobbering
1437 the flags register. */
1438 clobbers = targetm.md_asm_clobbers (clobbers);
1440 /* Count the number of meaningful clobbered registers, ignoring what
1441 we would ignore later. */
1442 nclobbers = 0;
1443 CLEAR_HARD_REG_SET (clobbered_regs);
1444 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1446 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1448 i = decode_reg_name (regname);
1449 if (i >= 0 || i == -4)
1450 ++nclobbers;
1451 else if (i == -2)
1452 error ("unknown register name `%s' in `asm'", regname);
1454 /* Mark clobbered registers. */
1455 if (i >= 0)
1457 /* Clobbering the PIC register is an error */
1458 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
1460 error ("PIC register `%s' clobbered in `asm'", regname);
1461 return;
1464 SET_HARD_REG_BIT (clobbered_regs, i);
1468 clear_last_expr ();
1470 /* First pass over inputs and outputs checks validity and sets
1471 mark_addressable if needed. */
1473 ninout = 0;
1474 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1476 tree val = TREE_VALUE (tail);
1477 tree type = TREE_TYPE (val);
1478 const char *constraint;
1479 bool is_inout;
1480 bool allows_reg;
1481 bool allows_mem;
1483 /* If there's an erroneous arg, emit no insn. */
1484 if (type == error_mark_node)
1485 return;
1487 /* Try to parse the output constraint. If that fails, there's
1488 no point in going further. */
1489 constraint = constraints[i];
1490 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1491 &allows_mem, &allows_reg, &is_inout))
1492 return;
1494 if (! allows_reg
1495 && (allows_mem
1496 || is_inout
1497 || (DECL_P (val)
1498 && GET_CODE (DECL_RTL (val)) == REG
1499 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
1500 lang_hooks.mark_addressable (val);
1502 if (is_inout)
1503 ninout++;
1506 ninputs += ninout;
1507 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1509 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1510 return;
1513 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1515 bool allows_reg, allows_mem;
1516 const char *constraint;
1518 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1519 would get VOIDmode and that could cause a crash in reload. */
1520 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1521 return;
1523 constraint = constraints[i + noutputs];
1524 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1525 constraints, &allows_mem, &allows_reg))
1526 return;
1528 if (! allows_reg && allows_mem)
1529 lang_hooks.mark_addressable (TREE_VALUE (tail));
1532 /* Second pass evaluates arguments. */
1534 ninout = 0;
1535 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1537 tree val = TREE_VALUE (tail);
1538 tree type = TREE_TYPE (val);
1539 bool is_inout;
1540 bool allows_reg;
1541 bool allows_mem;
1542 rtx op;
1544 if (!parse_output_constraint (&constraints[i], i, ninputs,
1545 noutputs, &allows_mem, &allows_reg,
1546 &is_inout))
1547 abort ();
1549 /* If an output operand is not a decl or indirect ref and our constraint
1550 allows a register, make a temporary to act as an intermediate.
1551 Make the asm insn write into that, then our caller will copy it to
1552 the real output operand. Likewise for promoted variables. */
1554 generating_concat_p = 0;
1556 real_output_rtx[i] = NULL_RTX;
1557 if ((TREE_CODE (val) == INDIRECT_REF
1558 && allows_mem)
1559 || (DECL_P (val)
1560 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1561 && ! (GET_CODE (DECL_RTL (val)) == REG
1562 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1563 || ! allows_reg
1564 || is_inout)
1566 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1567 if (GET_CODE (op) == MEM)
1568 op = validize_mem (op);
1570 if (! allows_reg && GET_CODE (op) != MEM)
1571 error ("output number %d not directly addressable", i);
1572 if ((! allows_mem && GET_CODE (op) == MEM)
1573 || GET_CODE (op) == CONCAT)
1575 real_output_rtx[i] = protect_from_queue (op, 1);
1576 op = gen_reg_rtx (GET_MODE (op));
1577 if (is_inout)
1578 emit_move_insn (op, real_output_rtx[i]);
1581 else
1583 op = assign_temp (type, 0, 0, 1);
1584 op = validize_mem (op);
1585 TREE_VALUE (tail) = make_tree (type, op);
1587 output_rtx[i] = op;
1589 generating_concat_p = old_generating_concat_p;
1591 if (is_inout)
1593 inout_mode[ninout] = TYPE_MODE (type);
1594 inout_opnum[ninout++] = i;
1597 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1598 clobber_conflict_found = 1;
1601 /* Make vectors for the expression-rtx, constraint strings,
1602 and named operands. */
1604 argvec = rtvec_alloc (ninputs);
1605 constraintvec = rtvec_alloc (ninputs);
1607 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1608 : GET_MODE (output_rtx[0])),
1609 TREE_STRING_POINTER (string),
1610 empty_string, 0, argvec, constraintvec,
1611 locus.file, locus.line);
1613 MEM_VOLATILE_P (body) = vol;
1615 /* Eval the inputs and put them into ARGVEC.
1616 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1618 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1620 bool allows_reg, allows_mem;
1621 const char *constraint;
1622 tree val, type;
1623 rtx op;
1625 constraint = constraints[i + noutputs];
1626 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1627 constraints, &allows_mem, &allows_reg))
1628 abort ();
1630 generating_concat_p = 0;
1632 val = TREE_VALUE (tail);
1633 type = TREE_TYPE (val);
1634 op = expand_expr (val, NULL_RTX, VOIDmode,
1635 (allows_mem && !allows_reg
1636 ? EXPAND_MEMORY : EXPAND_NORMAL));
1638 /* Never pass a CONCAT to an ASM. */
1639 if (GET_CODE (op) == CONCAT)
1640 op = force_reg (GET_MODE (op), op);
1641 else if (GET_CODE (op) == MEM)
1642 op = validize_mem (op);
1644 if (asm_operand_ok (op, constraint) <= 0)
1646 if (allows_reg)
1647 op = force_reg (TYPE_MODE (type), op);
1648 else if (!allows_mem)
1649 warning ("asm operand %d probably doesn't match constraints",
1650 i + noutputs);
1651 else if (GET_CODE (op) == MEM)
1653 /* We won't recognize either volatile memory or memory
1654 with a queued address as an available memory_operand
1655 at this point. Ignore it: clearly this *is* a memory. */
1657 else
1659 warning ("use of memory input without lvalue in "
1660 "asm operand %d is deprecated", i + noutputs);
1662 if (CONSTANT_P (op))
1664 rtx mem = force_const_mem (TYPE_MODE (type), op);
1665 if (mem)
1666 op = validize_mem (mem);
1667 else
1668 op = force_reg (TYPE_MODE (type), op);
1670 if (GET_CODE (op) == REG
1671 || GET_CODE (op) == SUBREG
1672 || GET_CODE (op) == ADDRESSOF
1673 || GET_CODE (op) == CONCAT)
1675 tree qual_type = build_qualified_type (type,
1676 (TYPE_QUALS (type)
1677 | TYPE_QUAL_CONST));
1678 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1679 memloc = validize_mem (memloc);
1680 emit_move_insn (memloc, op);
1681 op = memloc;
1686 generating_concat_p = old_generating_concat_p;
1687 ASM_OPERANDS_INPUT (body, i) = op;
1689 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1690 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1692 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1693 clobber_conflict_found = 1;
1696 /* Protect all the operands from the queue now that they have all been
1697 evaluated. */
1699 generating_concat_p = 0;
1701 for (i = 0; i < ninputs - ninout; i++)
1702 ASM_OPERANDS_INPUT (body, i)
1703 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1705 for (i = 0; i < noutputs; i++)
1706 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1708 /* For in-out operands, copy output rtx to input rtx. */
1709 for (i = 0; i < ninout; i++)
1711 int j = inout_opnum[i];
1712 char buffer[16];
1714 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1715 = output_rtx[j];
1717 sprintf (buffer, "%d", j);
1718 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1719 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
1722 generating_concat_p = old_generating_concat_p;
1724 /* Now, for each output, construct an rtx
1725 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1726 ARGVEC CONSTRAINTS OPNAMES))
1727 If there is more than one, put them inside a PARALLEL. */
1729 if (noutputs == 1 && nclobbers == 0)
1731 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1732 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1735 else if (noutputs == 0 && nclobbers == 0)
1737 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1738 emit_insn (body);
1741 else
1743 rtx obody = body;
1744 int num = noutputs;
1746 if (num == 0)
1747 num = 1;
1749 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1751 /* For each output operand, store a SET. */
1752 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1754 XVECEXP (body, 0, i)
1755 = gen_rtx_SET (VOIDmode,
1756 output_rtx[i],
1757 gen_rtx_ASM_OPERANDS
1758 (GET_MODE (output_rtx[i]),
1759 TREE_STRING_POINTER (string),
1760 constraints[i], i, argvec, constraintvec,
1761 locus.file, locus.line));
1763 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1766 /* If there are no outputs (but there are some clobbers)
1767 store the bare ASM_OPERANDS into the PARALLEL. */
1769 if (i == 0)
1770 XVECEXP (body, 0, i++) = obody;
1772 /* Store (clobber REG) for each clobbered register specified. */
1774 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1776 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1777 int j = decode_reg_name (regname);
1778 rtx clobbered_reg;
1780 if (j < 0)
1782 if (j == -3) /* `cc', which is not a register */
1783 continue;
1785 if (j == -4) /* `memory', don't cache memory across asm */
1787 XVECEXP (body, 0, i++)
1788 = gen_rtx_CLOBBER (VOIDmode,
1789 gen_rtx_MEM
1790 (BLKmode,
1791 gen_rtx_SCRATCH (VOIDmode)));
1792 continue;
1795 /* Ignore an unknown register; the error was already signaled. */
1796 continue;
1799 /* Use QImode since that's guaranteed to clobber just one reg. */
1800 clobbered_reg = gen_rtx_REG (QImode, j);
1802 /* Sanity-check for any overlap between clobbers and, respectively,
1803 inputs and outputs that has not already been handled. Such overlap
1804 should have been detected and reported above. */
1805 if (!clobber_conflict_found)
1807 int opno;
1809 /* We test the old body (obody) contents to avoid tripping
1810 over the under-construction body. */
1811 for (opno = 0; opno < noutputs; opno++)
1812 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1813 internal_error ("asm clobber conflict with output operand");
1815 for (opno = 0; opno < ninputs - ninout; opno++)
1816 if (reg_overlap_mentioned_p (clobbered_reg,
1817 ASM_OPERANDS_INPUT (obody, opno)))
1818 internal_error ("asm clobber conflict with input operand");
1821 XVECEXP (body, 0, i++)
1822 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1825 emit_insn (body);
1828 /* For any outputs that needed reloading into registers, spill them
1829 back to where they belong. */
1830 for (i = 0; i < noutputs; ++i)
1831 if (real_output_rtx[i])
1832 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1834 free_temp_slots ();
1837 void
1838 expand_asm_expr (tree exp)
1840 int noutputs, i;
1841 tree outputs, tail;
1842 tree *o;
1844 if (ASM_INPUT_P (exp))
1846 expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
1847 return;
1850 outputs = ASM_OUTPUTS (exp);
1851 noutputs = list_length (outputs);
1852 /* o[I] is the place that output number I should be written. */
1853 o = (tree *) alloca (noutputs * sizeof (tree));
1855 /* Record the contents of OUTPUTS before it is modified. */
1856 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1857 o[i] = TREE_VALUE (tail);
1859 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1860 OUTPUTS some trees for where the values were actually stored. */
1861 expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1862 ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1863 input_location);
1865 /* Copy all the intermediate outputs into the specified outputs. */
1866 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1868 if (o[i] != TREE_VALUE (tail))
1870 expand_assignment (o[i], TREE_VALUE (tail), 0);
1871 free_temp_slots ();
1873 /* Restore the original value so that it's correct the next
1874 time we expand this function. */
1875 TREE_VALUE (tail) = o[i];
1879 /* Those MODIFY_EXPRs could do autoincrements. */
1880 emit_queue ();
1883 /* A subroutine of expand_asm_operands. Check that all operands have
1884 the same number of alternatives. Return true if so. */
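/* The number of alternatives is one more than the number of commas in a
   constraint string: e.g. "r,m" describes two alternatives, so every other
   operand's constraint must then contain exactly one comma as well.  */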
1886 static bool
1887 check_operand_nalternatives (tree outputs, tree inputs)
1889 if (outputs || inputs)
1891 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1892 int nalternatives
1893 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1894 tree next = inputs;
1896 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1898 error ("too many alternatives in `asm'");
1899 return false;
1902 tmp = outputs;
1903 while (tmp)
1905 const char *constraint
1906 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1908 if (n_occurrences (',', constraint) != nalternatives)
1910 error ("operand constraints for `asm' differ in number of alternatives");
1911 return false;
1914 if (TREE_CHAIN (tmp))
1915 tmp = TREE_CHAIN (tmp);
1916 else
1917 tmp = next, next = 0;
1921 return true;
1924 /* A subroutine of expand_asm_operands. Check that all operand names
1925 are unique. Return true if so. We rely on the fact that these names
1926 are identifiers, and so have been canonicalized by get_identifier,
1927 so all we need are pointer comparisons. */
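/* For example (illustrative), an asm whose operand list uses the same
   symbolic name twice, say [x] on both an output and an input, is
   rejected here with the "duplicate asm operand name" error below.  */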
1929 static bool
1930 check_unique_operand_names (tree outputs, tree inputs)
1932 tree i, j;
1934 for (i = outputs; i ; i = TREE_CHAIN (i))
1936 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1937 if (! i_name)
1938 continue;
1940 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1941 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1942 goto failure;
1945 for (i = inputs; i ; i = TREE_CHAIN (i))
1947 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1948 if (! i_name)
1949 continue;
1951 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1952 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1953 goto failure;
1954 for (j = outputs; j ; j = TREE_CHAIN (j))
1955 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1956 goto failure;
1959 return true;
1961 failure:
1962 error ("duplicate asm operand name '%s'",
1963 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1964 return false;
1967 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1968 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1969 STRING and in the constraints to those numbers. */
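/* Illustrative sketch: for an asm written as
     asm ("mov %[src], %[dst]" : [dst] "=r" (x) : [src] "r" (y));
   the "%[dst]" and "%[src]" references in the template are rewritten to
   the operand numbers "%0" and "%1", and any "[name]" appearing inside an
   input constraint is likewise replaced by a number.  */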
1971 tree
1972 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1974 char *buffer;
1975 char *p;
1976 const char *c;
1977 tree t;
1979 check_unique_operand_names (outputs, inputs);
1981 /* Substitute [<name>] in input constraint strings. There should be no
1982 named operands in output constraints. */
1983 for (t = inputs; t ; t = TREE_CHAIN (t))
1985 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1986 if (strchr (c, '[') != NULL)
1988 p = buffer = xstrdup (c);
1989 while ((p = strchr (p, '[')) != NULL)
1990 p = resolve_operand_name_1 (p, outputs, inputs);
1991 TREE_VALUE (TREE_PURPOSE (t))
1992 = build_string (strlen (buffer), buffer);
1993 free (buffer);
1997 /* Now check for any needed substitutions in the template. */
1998 c = TREE_STRING_POINTER (string);
1999 while ((c = strchr (c, '%')) != NULL)
2001 if (c[1] == '[')
2002 break;
2003 else if (ISALPHA (c[1]) && c[2] == '[')
2004 break;
2005 else
2007 c += 1;
2008 continue;
2012 if (c)
2014 /* OK, we need to make a copy so we can perform the substitutions.
2015 Assume that we will not need extra space--we get to remove '['
2016 and ']', which means we cannot have a problem until we have more
2017 than 999 operands. */
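/* (The shortest possible reference, "[x]", occupies three characters, and
   an operand number below 1000 needs at most three digits, so the
   replacement never grows the string.)  */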
2018 buffer = xstrdup (TREE_STRING_POINTER (string));
2019 p = buffer + (c - TREE_STRING_POINTER (string));
2021 while ((p = strchr (p, '%')) != NULL)
2023 if (p[1] == '[')
2024 p += 1;
2025 else if (ISALPHA (p[1]) && p[2] == '[')
2026 p += 2;
2027 else
2029 p += 1;
2030 continue;
2033 p = resolve_operand_name_1 (p, outputs, inputs);
2036 string = build_string (strlen (buffer), buffer);
2037 free (buffer);
2040 return string;
2043 /* A subroutine of resolve_operand_names. P points to the '[' for a
2044 potential named operand of the form [<name>]. In place, replace
2045 the name and brackets with a number. Return a pointer to the
2046 balance of the string after substitution. */
2048 static char *
2049 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
2051 char *q;
2052 int op;
2053 tree t;
2054 size_t len;
2056 /* Collect the operand name. */
2057 q = strchr (p, ']');
2058 if (!q)
2060 error ("missing close brace for named operand");
2061 return strchr (p, '\0');
2063 len = q - p - 1;
2065 /* Resolve the name to a number. */
2066 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2068 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2069 if (name)
2071 const char *c = TREE_STRING_POINTER (name);
2072 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2073 goto found;
2076 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2078 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2079 if (name)
2081 const char *c = TREE_STRING_POINTER (name);
2082 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2083 goto found;
2087 *q = '\0';
2088 error ("undefined named operand '%s'", p + 1);
2089 op = 0;
2090 found:
2092 /* Replace the name with the number. Unfortunately, not all libraries
2093 get the return value of sprintf correct, so search for the end of the
2094 generated string by hand. */
2095 sprintf (p, "%d", op);
2096 p = strchr (p, '\0');
2098 /* Verify the no extra buffer space assumption. */
2099 if (p > q)
2100 abort ();
2102 /* Shift the rest of the buffer down to fill the gap. */
2103 memmove (p, q + 1, strlen (q + 1) + 1);
2105 return p;
2108 /* Generate RTL to evaluate the expression EXP
2109 and remember it in case this is the VALUE in a ({... VALUE; }) construct.
2110 Provided just for backward-compatibility. expand_expr_stmt_value()
2111 should be used for new code. */
2113 void
2114 expand_expr_stmt (tree exp)
2116 expand_expr_stmt_value (exp, -1, 1);
2119 /* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2120 whether to (1) save the value of the expression, (0) discard it or
2121 (-1) use expr_stmts_for_value to tell. The use of -1 is
2122 deprecated, and retained only for backward compatibility. */
2124 void
2125 expand_expr_stmt_value (tree exp, int want_value, int maybe_last)
2127 rtx value;
2128 tree type;
2129 rtx alt_rtl = NULL;
2131 if (want_value == -1)
2132 want_value = expr_stmts_for_value != 0;
2134 /* If -Wextra, warn about statements with no side effects,
2135 except for an explicit cast to void (e.g. for assert()), and
2136 except for the last statement in ({...}), where they may be useful. */
2137 if (! want_value
2138 && (expr_stmts_for_value == 0 || ! maybe_last)
2139 && exp != error_mark_node
2140 && warn_unused_value)
2142 if (TREE_SIDE_EFFECTS (exp))
2143 warn_if_unused_value (exp);
2144 else if (!VOID_TYPE_P (TREE_TYPE (exp)) && !TREE_NO_WARNING (exp))
2145 warning ("%Hstatement with no effect", &emit_locus);
2148 /* If EXP is of function type and we are expanding statements for
2149 value, convert it to pointer-to-function. */
2150 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2151 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2153 /* The call to `expand_expr' could cause last_expr_type and
2154 last_expr_value to get reset. Therefore, we set last_expr_value
2155 and last_expr_type *after* calling expand_expr. */
2156 value = expand_expr_real (exp, want_value ? NULL_RTX : const0_rtx,
2157 VOIDmode, 0, &alt_rtl);
2158 type = TREE_TYPE (exp);
2160 /* If all we do is reference a volatile value in memory,
2161 copy it to a register to be sure it is actually touched. */
2162 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2164 if (TYPE_MODE (type) == VOIDmode)
2166 else if (TYPE_MODE (type) != BLKmode)
2167 value = copy_to_reg (value);
2168 else
2170 rtx lab = gen_label_rtx ();
2172 /* Compare the value with itself to reference it. */
2173 emit_cmp_and_jump_insns (value, value, EQ,
2174 expand_expr (TYPE_SIZE (type),
2175 NULL_RTX, VOIDmode, 0),
2176 BLKmode, 0, lab);
2177 emit_label (lab);
2181 /* If this expression is part of a ({...}) and is in memory, we may have
2182 to preserve temporaries. */
2183 preserve_temp_slots (value);
2185 /* Free any temporaries used to evaluate this expression. Any temporary
2186 used as a result of this expression will already have been preserved
2187 above. */
2188 free_temp_slots ();
2190 if (want_value)
2192 last_expr_value = value;
2193 last_expr_alt_rtl = alt_rtl;
2194 last_expr_type = type;
2197 emit_queue ();
2200 /* Warn if EXP contains any computations whose results are not used.
2201 Return 1 if a warning is printed; 0 otherwise. */
2204 warn_if_unused_value (tree exp)
2206 if (TREE_USED (exp))
2207 return 0;
2209 /* Don't warn about void constructs. This includes casting to void,
2210 void function calls, and statement expressions with a final cast
2211 to void. */
2212 if (VOID_TYPE_P (TREE_TYPE (exp)))
2213 return 0;
2215 switch (TREE_CODE (exp))
2217 case PREINCREMENT_EXPR:
2218 case POSTINCREMENT_EXPR:
2219 case PREDECREMENT_EXPR:
2220 case POSTDECREMENT_EXPR:
2221 case MODIFY_EXPR:
2222 case INIT_EXPR:
2223 case TARGET_EXPR:
2224 case CALL_EXPR:
2225 case RTL_EXPR:
2226 case TRY_CATCH_EXPR:
2227 case WITH_CLEANUP_EXPR:
2228 case EXIT_EXPR:
2229 return 0;
2231 case BIND_EXPR:
2232 /* For a binding, warn if no side effect within it. */
2233 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2235 case SAVE_EXPR:
2236 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2238 case TRUTH_ORIF_EXPR:
2239 case TRUTH_ANDIF_EXPR:
2240 /* In && or ||, warn if 2nd operand has no side effect. */
2241 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2243 case COMPOUND_EXPR:
2244 if (TREE_NO_WARNING (exp))
2245 return 0;
2246 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2247 return 1;
2248 /* Let people do `(foo (), 0)' without a warning. */
2249 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2250 return 0;
2251 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2253 case NOP_EXPR:
2254 case CONVERT_EXPR:
2255 case NON_LVALUE_EXPR:
2256 /* Don't warn about conversions not explicit in the user's program. */
2257 if (TREE_NO_WARNING (exp))
2258 return 0;
2259 /* Assignment to a cast usually results in a cast of a modify.
2260 Don't complain about that. There can be an arbitrary number of
2261 casts before the modify, so we must loop until we find the first
2262 non-cast expression and then test to see if that is a modify. */
2264 tree tem = TREE_OPERAND (exp, 0);
2266 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2267 tem = TREE_OPERAND (tem, 0);
2269 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2270 || TREE_CODE (tem) == CALL_EXPR)
2271 return 0;
2273 goto maybe_warn;
2275 case INDIRECT_REF:
2276 /* Don't warn about automatic dereferencing of references, since
2277 the user cannot control it. */
2278 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2279 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2280 /* Fall through. */
2282 default:
2283 /* Referencing a volatile value is a side effect, so don't warn. */
2284 if ((DECL_P (exp)
2285 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2286 && TREE_THIS_VOLATILE (exp))
2287 return 0;
2289 /* If this is an expression which has no operands, there is no value
2290 to be unused. There are no such language-independent codes,
2291 but front ends may define such. */
2292 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2293 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2294 return 0;
2296 maybe_warn:
2297 /* If this is an expression with side effects, don't warn. */
2298 if (TREE_SIDE_EFFECTS (exp))
2299 return 0;
2301 warning ("%Hvalue computed is not used", &emit_locus);
2302 return 1;
2306 /* Clear out the memory of the last expression evaluated. */
2308 void
2309 clear_last_expr (void)
2311 last_expr_type = NULL_TREE;
2312 last_expr_value = NULL_RTX;
2313 last_expr_alt_rtl = NULL_RTX;
2316 /* Begin a statement-expression, i.e., a series of statements which
2317 may return a value. Return the RTL_EXPR for this statement expr.
2318 The caller must save that value and pass it to
2319 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2320 in the statement-expression are deallocated at the end of the
2321 expression. */
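/* For example (a GNU statement-expression):
     v = ({ int t = f (); t + 1; });
   The value of the last expression statement, t + 1, becomes the value of
   the whole construct; expand_start_stmt_expr is called before the
   statements are expanded and expand_end_stmt_expr afterwards.  */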
2323 tree
2324 expand_start_stmt_expr (int has_scope)
2326 tree t;
2328 /* Make the RTL_EXPR node temporary, not momentary,
2329 so that rtl_expr_chain doesn't become garbage. */
2330 t = make_node (RTL_EXPR);
2331 do_pending_stack_adjust ();
2332 if (has_scope)
2333 start_sequence_for_rtl_expr (t);
2334 else
2335 start_sequence ();
2336 NO_DEFER_POP;
2337 expr_stmts_for_value++;
2338 return t;
2341 /* Restore the previous state at the end of a statement that returns a value.
2342 Returns a tree node representing the statement's value and the
2343 insns to compute the value.
2345 The nodes of that expression have been freed by now, so we cannot use them.
2346 But we don't want to do that anyway; the expression has already been
2347 evaluated and now we just want to use the value. So generate a RTL_EXPR
2348 with the proper type and RTL value.
2350 If the last substatement was not an expression,
2351 return something with type `void'. */
2353 tree
2354 expand_end_stmt_expr (tree t)
2356 OK_DEFER_POP;
2358 if (! last_expr_value || ! last_expr_type)
2360 last_expr_value = const0_rtx;
2361 last_expr_alt_rtl = NULL_RTX;
2362 last_expr_type = void_type_node;
2364 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2365 /* Remove any possible QUEUED. */
2366 last_expr_value = protect_from_queue (last_expr_value, 0);
2368 emit_queue ();
2370 TREE_TYPE (t) = last_expr_type;
2371 RTL_EXPR_RTL (t) = last_expr_value;
2372 RTL_EXPR_ALT_RTL (t) = last_expr_alt_rtl;
2373 RTL_EXPR_SEQUENCE (t) = get_insns ();
2375 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2377 end_sequence ();
2379 /* Don't consider deleting this expr or containing exprs at tree level. */
2380 TREE_SIDE_EFFECTS (t) = 1;
2381 /* Propagate volatility of the actual RTL expr. */
2382 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2384 clear_last_expr ();
2385 expr_stmts_for_value--;
2387 return t;
2390 /* Generate RTL for the start of an if-then. COND is the expression
2391 whose truth should be tested.
2393 If EXITFLAG is nonzero, this conditional is visible to
2394 `exit_something'. */
2396 void
2397 expand_start_cond (tree cond, int exitflag)
2399 struct nesting *thiscond = ALLOC_NESTING ();
2401 /* Make an entry on cond_stack for the cond we are entering. */
2403 thiscond->desc = COND_NESTING;
2404 thiscond->next = cond_stack;
2405 thiscond->all = nesting_stack;
2406 thiscond->depth = ++nesting_depth;
2407 thiscond->data.cond.next_label = gen_label_rtx ();
2408 /* Before we encounter an `else', we don't need a separate exit label
2409 unless there are supposed to be exit statements
2410 to exit this conditional. */
2411 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2412 thiscond->data.cond.endif_label = thiscond->exit_label;
2413 cond_stack = thiscond;
2414 nesting_stack = thiscond;
2416 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2419 /* Generate RTL between the then-clause and the elseif-clause
2420 of an if-then-elseif-.... */
2422 void
2423 expand_start_elseif (tree cond)
2425 if (cond_stack->data.cond.endif_label == 0)
2426 cond_stack->data.cond.endif_label = gen_label_rtx ();
2427 emit_jump (cond_stack->data.cond.endif_label);
2428 emit_label (cond_stack->data.cond.next_label);
2429 cond_stack->data.cond.next_label = gen_label_rtx ();
2430 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2433 /* Generate RTL between the then-clause and the else-clause
2434 of an if-then-else. */
2436 void
2437 expand_start_else (void)
2439 if (cond_stack->data.cond.endif_label == 0)
2440 cond_stack->data.cond.endif_label = gen_label_rtx ();
2442 emit_jump (cond_stack->data.cond.endif_label);
2443 emit_label (cond_stack->data.cond.next_label);
2444 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2447 /* After calling expand_start_else, turn this "else" into an "else if"
2448 by providing another condition. */
2450 void
2451 expand_elseif (tree cond)
2453 cond_stack->data.cond.next_label = gen_label_rtx ();
2454 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2457 /* Generate RTL for the end of an if-then.
2458 Pop the record for it off of cond_stack. */
2460 void
2461 expand_end_cond (void)
2463 struct nesting *thiscond = cond_stack;
2465 do_pending_stack_adjust ();
2466 if (thiscond->data.cond.next_label)
2467 emit_label (thiscond->data.cond.next_label);
2468 if (thiscond->data.cond.endif_label)
2469 emit_label (thiscond->data.cond.endif_label);
2471 POPSTACK (cond_stack);
2472 clear_last_expr ();
2475 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2476 loop should be exited by `exit_something'. This is a loop for which
2477 `expand_continue' will jump to the top of the loop.
2479 Make an entry on loop_stack to record the labels associated with
2480 this loop. */
2482 struct nesting *
2483 expand_start_loop (int exit_flag)
2485 struct nesting *thisloop = ALLOC_NESTING ();
2487 /* Make an entry on loop_stack for the loop we are entering. */
2489 thisloop->desc = LOOP_NESTING;
2490 thisloop->next = loop_stack;
2491 thisloop->all = nesting_stack;
2492 thisloop->depth = ++nesting_depth;
2493 thisloop->data.loop.start_label = gen_label_rtx ();
2494 thisloop->data.loop.end_label = gen_label_rtx ();
2495 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2496 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2497 loop_stack = thisloop;
2498 nesting_stack = thisloop;
2500 do_pending_stack_adjust ();
2501 emit_queue ();
2502 emit_label (thisloop->data.loop.start_label);
2504 return thisloop;
2507 /* Like expand_start_loop but for a loop where the continuation point
2508 (for expand_continue_loop) will be specified explicitly. */
2510 struct nesting *
2511 expand_start_loop_continue_elsewhere (int exit_flag)
2513 struct nesting *thisloop = expand_start_loop (exit_flag);
2514 loop_stack->data.loop.continue_label = gen_label_rtx ();
2515 return thisloop;
2518 /* Begin a null, aka do { } while (0) "loop". But since the contents
2519 of said loop can still contain a break, we must frob the loop nest. */
2521 struct nesting *
2522 expand_start_null_loop (void)
2524 struct nesting *thisloop = ALLOC_NESTING ();
2526 /* Make an entry on loop_stack for the loop we are entering. */
2528 thisloop->desc = LOOP_NESTING;
2529 thisloop->next = loop_stack;
2530 thisloop->all = nesting_stack;
2531 thisloop->depth = ++nesting_depth;
2532 thisloop->data.loop.start_label = emit_note (NOTE_INSN_DELETED);
2533 thisloop->data.loop.end_label = gen_label_rtx ();
2534 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2535 thisloop->exit_label = thisloop->data.loop.end_label;
2536 loop_stack = thisloop;
2537 nesting_stack = thisloop;
2539 return thisloop;
2542 /* Specify the continuation point for a loop started with
2543 expand_start_loop_continue_elsewhere.
2544 Use this at the point in the code to which a continue statement
2545 should jump. */
2547 void
2548 expand_loop_continue_here (void)
2550 do_pending_stack_adjust ();
2551 emit_label (loop_stack->data.loop.continue_label);
2554 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2555 Pop the block off of loop_stack. */
2557 void
2558 expand_end_loop (void)
2560 rtx start_label = loop_stack->data.loop.start_label;
2561 rtx etc_note;
2562 int eh_regions, debug_blocks;
2563 bool empty_test;
2565 do_pending_stack_adjust ();
2567 /* If the loop starts with a loop exit, roll that to the end where
2568 it will optimize together with the jump back.
2570 If the loop presently looks like this (in pseudo-C):
2572 start_label:
2573 if (test) goto end_label;
2574 LOOP_END_TOP_COND
2575 body;
2576 goto start_label;
2577 end_label:
2579 transform it to look like:
2581 goto start_label;
2582 top_label:
2583 body;
2584 start_label:
2585 if (test) goto end_label;
2586 goto top_label;
2587 end_label:
2589 We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
2590 the end of the entry conditional. Without this, our lexical scan
2591 can't tell the difference between an entry conditional and a
2592 body conditional that exits the loop. Mistaking the two means
2593 that we can misplace the NOTE_INSN_LOOP_CONT note, which can
2594 screw up loop unrolling.
2596 Things will be oh so much better when loop optimization is done
2597 off of a proper control flow graph... */
2599 /* Scan insns from the top of the loop looking for the END_TOP_COND note. */
2601 empty_test = true;
2602 eh_regions = debug_blocks = 0;
2603 for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2604 if (GET_CODE (etc_note) == NOTE)
2606 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2607 break;
2609 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2610 abort ();
2612 /* At the same time, scan for EH region notes, as we don't want
2613 to scrog region nesting. This shouldn't happen, but... */
2614 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2615 eh_regions++;
2616 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2618 if (--eh_regions < 0)
2619 /* We've come to the end of an EH region, but never saw the
2620 beginning of that region. That means that an EH region
2621 begins before the top of the loop, and ends in the middle
2622 of it. The existence of such a situation violates a basic
2623 assumption in this code, since that would imply that even
2624 when EH_REGIONS is zero, we might move code out of an
2625 exception region. */
2626 abort ();
2629 /* Likewise for debug scopes. In this case we'll either (1) move
2630 all of the notes if they are properly nested or (2) leave the
2631 notes alone and only rotate the loop at high optimization
2632 levels when we expect to scrog debug info. */
2633 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2634 debug_blocks++;
2635 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2636 debug_blocks--;
2638 else if (INSN_P (etc_note))
2639 empty_test = false;
2641 if (etc_note
2642 && optimize
2643 && ! empty_test
2644 && eh_regions == 0
2645 && (debug_blocks == 0 || optimize >= 2)
2646 && NEXT_INSN (etc_note) != NULL_RTX
2647 && ! any_condjump_p (get_last_insn ()))
2649 /* We found one. Move everything from START to ETC to the end
2650 of the loop, and add a jump from the top of the loop. */
2651 rtx top_label = gen_label_rtx ();
2652 rtx start_move = start_label;
2654 emit_label_before (top_label, start_move);
2656 /* Actually move the insns. If the debug scopes are nested, we
2657 can move everything at once. Otherwise we have to move them
2658 one by one and squeeze out the block notes. */
2659 if (debug_blocks == 0)
2660 reorder_insns (start_move, etc_note, get_last_insn ());
2661 else
2663 rtx insn, next_insn;
2664 for (insn = start_move; insn; insn = next_insn)
2666 /* Figure out which insn comes after this one. We have
2667 to do this before we move INSN. */
2668 next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
2670 if (GET_CODE (insn) == NOTE
2671 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2672 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2673 continue;
2675 reorder_insns (insn, insn, get_last_insn ());
2679 /* Add the jump from the top of the loop. */
2680 emit_jump_insn_before (gen_jump (start_label), top_label);
2681 emit_barrier_before (top_label);
2682 start_label = top_label;
2685 if (etc_note)
2686 delete_insn (etc_note);
2688 emit_jump (start_label);
2689 emit_label (loop_stack->data.loop.end_label);
2691 POPSTACK (loop_stack);
2693 clear_last_expr ();
2696 /* Finish a null loop, aka do { } while (0). */
2698 void
2699 expand_end_null_loop (void)
2701 do_pending_stack_adjust ();
2702 emit_label (loop_stack->data.loop.end_label);
2704 POPSTACK (loop_stack);
2706 clear_last_expr ();
2709 /* Generate a jump to the current loop's continue-point.
2710 This is usually the top of the loop, but may be specified
2711 explicitly elsewhere. If not currently inside a loop,
2712 return 0 and do nothing; caller will print an error message. */
2715 expand_continue_loop (struct nesting *whichloop)
2717 /* Emit information for branch prediction. */
2718 rtx note;
2720 if (flag_guess_branch_prob)
2722 note = emit_note (NOTE_INSN_PREDICTION);
2723 NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
2725 clear_last_expr ();
2726 if (whichloop == 0)
2727 whichloop = loop_stack;
2728 if (whichloop == 0)
2729 return 0;
2730 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2731 NULL_RTX);
2732 return 1;
2735 /* Generate a jump to exit the current loop. If not currently inside a loop,
2736 return 0 and do nothing; caller will print an error message. */
2739 expand_exit_loop (struct nesting *whichloop)
2741 clear_last_expr ();
2742 if (whichloop == 0)
2743 whichloop = loop_stack;
2744 if (whichloop == 0)
2745 return 0;
2746 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2747 return 1;
2750 /* Generate a conditional jump to exit the current loop if COND
2751 evaluates to zero. If not currently inside a loop,
2752 return 0 and do nothing; caller will print an error message. */
2755 expand_exit_loop_if_false (struct nesting *whichloop, tree cond)
2757 rtx label;
2758 clear_last_expr ();
2760 if (whichloop == 0)
2761 whichloop = loop_stack;
2762 if (whichloop == 0)
2763 return 0;
2765 if (integer_nonzerop (cond))
2766 return 1;
2767 if (integer_zerop (cond))
2768 return expand_exit_loop (whichloop);
2770 /* Check if we definitely won't need a fixup. */
2771 if (whichloop == nesting_stack)
2773 jumpifnot (cond, whichloop->data.loop.end_label);
2774 return 1;
2777 /* In order to handle fixups, we actually create a conditional jump
2778 around an unconditional branch to exit the loop. If fixups are
2779 necessary, they go before the unconditional branch. */
2781 label = gen_label_rtx ();
2782 jumpif (cond, label);
2783 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2784 NULL_RTX);
2785 emit_label (label);
2787 return 1;
2790 /* Like expand_exit_loop_if_false except also emit a note marking
2791 the end of the conditional. Should only be used immediately
2792 after expand_start_loop. */
2795 expand_exit_loop_top_cond (struct nesting *whichloop, tree cond)
2797 if (! expand_exit_loop_if_false (whichloop, cond))
2798 return 0;
2800 emit_note (NOTE_INSN_LOOP_END_TOP_COND);
2801 return 1;
2804 /* Return nonzero if we should preserve sub-expressions as separate
2805 pseudos. We never do so if we aren't optimizing. We always do so
2806 if -fexpensive-optimizations.
2808 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2809 the loop may still be a small one. */
2812 preserve_subexpressions_p (void)
2814 rtx insn;
2816 if (flag_expensive_optimizations)
2817 return 1;
2819 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2820 return 0;
2822 insn = get_last_insn_anywhere ();
2824 return (insn
2825 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2826 < n_non_fixed_regs * 3));
2830 /* Generate a jump to exit the current loop, conditional, binding contour
2831 or case statement. Not all such constructs are visible to this function,
2832 only those started with EXIT_FLAG nonzero. Individual languages use
2833 the EXIT_FLAG parameter to control which kinds of constructs you can
2834 exit this way.
2836 If not currently inside anything that can be exited,
2837 return 0 and do nothing; caller will print an error message. */
2840 expand_exit_something (void)
2842 struct nesting *n;
2843 clear_last_expr ();
2844 for (n = nesting_stack; n; n = n->all)
2845 if (n->exit_label != 0)
2847 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2848 return 1;
2851 return 0;
2854 /* Generate RTL to return from the current function, with no value.
2855 (That is, we do not do anything about returning any value.) */
2857 void
2858 expand_null_return (void)
2860 rtx last_insn;
2862 last_insn = get_last_insn ();
2864 /* If this function was declared to return a value, but we
2865 didn't, clobber the return registers so that they are not
2866 propagated live to the rest of the function. */
2867 clobber_return_register ();
2869 expand_null_return_1 (last_insn);
2872 /* Generate RTL to return directly from the current function.
2873 (That is, we bypass any return value.) */
2875 void
2876 expand_naked_return (void)
2878 rtx last_insn, end_label;
2880 last_insn = get_last_insn ();
2881 end_label = naked_return_label;
2883 clear_pending_stack_adjust ();
2884 do_pending_stack_adjust ();
2885 clear_last_expr ();
2887 if (end_label == 0)
2888 end_label = naked_return_label = gen_label_rtx ();
2889 expand_goto_internal (NULL_TREE, end_label, last_insn);
2892 /* Try to guess whether the return value indicates an error code. */
2893 static enum br_predictor
2894 return_prediction (rtx val)
2896 /* Different heuristics for pointers and scalars. */
2897 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2899 /* NULL is usually not returned. */
2900 if (val == const0_rtx)
2901 return PRED_NULL_RETURN;
2903 else
2905 /* Negative return values are often used to indicate
2906 errors. */
2907 if (GET_CODE (val) == CONST_INT
2908 && INTVAL (val) < 0)
2909 return PRED_NEGATIVE_RETURN;
2910 /* Constant return values are also usually errors;
2911 zero/one often mean booleans, so exclude them from the
2912 heuristics. */
2913 if (CONSTANT_P (val)
2914 && (val != const0_rtx && val != const1_rtx))
2915 return PRED_CONST_RETURN;
2917 return PRED_NO_PREDICTION;
2921 /* If the current function returns values in the most significant part
2922 of a register, shift return value VAL appropriately. The mode of
2923 the function's return type is known not to be BLKmode. */
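/* Worked example (illustrative): if the return value occupies one byte
   but the return register is 32 bits wide and the target returns values
   in the most significant part, SHIFT below is 32 - 8 = 24 and VAL is
   shifted left by 24 bits.  */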
2925 static rtx
2926 shift_return_value (rtx val)
2928 tree type;
2930 type = TREE_TYPE (DECL_RESULT (current_function_decl));
2931 if (targetm.calls.return_in_msb (type))
2933 rtx target;
2934 HOST_WIDE_INT shift;
2936 target = DECL_RTL (DECL_RESULT (current_function_decl));
2937 shift = (GET_MODE_BITSIZE (GET_MODE (target))
2938 - BITS_PER_UNIT * int_size_in_bytes (type));
2939 if (shift > 0)
2940 val = expand_binop (GET_MODE (target), ashl_optab,
2941 gen_lowpart (GET_MODE (target), val),
2942 GEN_INT (shift), target, 1, OPTAB_WIDEN);
2944 return val;
2948 /* Generate RTL to return from the current function, with value VAL. */
2950 static void
2951 expand_value_return (rtx val)
2953 rtx last_insn;
2954 rtx return_reg;
2955 enum br_predictor pred;
2957 if (flag_guess_branch_prob
2958 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
2960 /* Emit information for branch prediction. */
2961 rtx note;
2963 note = emit_note (NOTE_INSN_PREDICTION);
2965 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2969 last_insn = get_last_insn ();
2970 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2972 /* Copy the value to the return location
2973 unless it's already there. */
2975 if (return_reg != val)
2977 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2978 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
2980 int unsignedp = TYPE_UNSIGNED (type);
2981 enum machine_mode old_mode
2982 = DECL_MODE (DECL_RESULT (current_function_decl));
2983 enum machine_mode mode
2984 = promote_mode (type, old_mode, &unsignedp, 1);
2986 if (mode != old_mode)
2987 val = convert_modes (mode, old_mode, val, unsignedp);
2989 if (GET_CODE (return_reg) == PARALLEL)
2990 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
2991 else
2992 emit_move_insn (return_reg, val);
2995 expand_null_return_1 (last_insn);
2998 /* Output a return with no value. If LAST_INSN is nonzero,
2999 pretend that the return takes place after LAST_INSN. */
3001 static void
3002 expand_null_return_1 (rtx last_insn)
3004 rtx end_label = cleanup_label ? cleanup_label : return_label;
3006 clear_pending_stack_adjust ();
3007 do_pending_stack_adjust ();
3008 clear_last_expr ();
3010 if (end_label == 0)
3011 end_label = return_label = gen_label_rtx ();
3012 expand_goto_internal (NULL_TREE, end_label, last_insn);
3015 /* Generate RTL to evaluate the expression RETVAL and return it
3016 from the current function. */
3018 void
3019 expand_return (tree retval)
3021 /* If there are any cleanups to be performed, then they will
3022 be inserted following LAST_INSN. It is desirable
3023 that the last_insn, for such purposes, should be the
3024 last insn before computing the return value. Otherwise, cleanups
3025 which call functions can clobber the return value. */
3026 /* ??? rms: I think that is erroneous, because in C++ it would
3027 run destructors on variables that might be used in the subsequent
3028 computation of the return value. */
3029 rtx last_insn = 0;
3030 rtx result_rtl;
3031 rtx val = 0;
3032 tree retval_rhs;
3034 /* If function wants no value, give it none. */
3035 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3037 expand_expr (retval, NULL_RTX, VOIDmode, 0);
3038 emit_queue ();
3039 expand_null_return ();
3040 return;
3043 if (retval == error_mark_node)
3045 /* Treat this like a return of no value from a function that
3046 returns a value. */
3047 expand_null_return ();
3048 return;
3050 else if (TREE_CODE (retval) == RESULT_DECL)
3051 retval_rhs = retval;
3052 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3053 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3054 retval_rhs = TREE_OPERAND (retval, 1);
3055 else
3056 retval_rhs = retval;
3058 last_insn = get_last_insn ();
3060 /* Distribute return down conditional expr if either of the sides
3061 may involve tail recursion (see test below). This enhances the number
3062 of tail recursions we see. Don't do this always since it can produce
3063 sub-optimal code in some cases and we distribute assignments into
3064 conditional expressions when it would help. */
3066 if (optimize && retval_rhs != 0
3067 && frame_offset == 0
3068 && TREE_CODE (retval_rhs) == COND_EXPR
3069 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3070 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3072 rtx label = gen_label_rtx ();
3073 tree expr;
3075 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3076 start_cleanup_deferral ();
3077 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3078 DECL_RESULT (current_function_decl),
3079 TREE_OPERAND (retval_rhs, 1));
3080 TREE_SIDE_EFFECTS (expr) = 1;
3081 expand_return (expr);
3082 emit_label (label);
3084 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3085 DECL_RESULT (current_function_decl),
3086 TREE_OPERAND (retval_rhs, 2));
3087 TREE_SIDE_EFFECTS (expr) = 1;
3088 expand_return (expr);
3089 end_cleanup_deferral ();
3090 return;
3093 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3095 /* If the result is an aggregate that is being returned in one (or more)
3096 registers, load the registers here. The compiler currently can't handle
3097 copying a BLKmode value into registers. We could put this code in a
3098 more general area (for use by everyone instead of just function
3099 call/return), but until this feature is generally usable it is kept here
3100 (and in expand_call). The value must go into a pseudo in case there
3101 are cleanups that will clobber the real return register. */
3103 if (retval_rhs != 0
3104 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3105 && GET_CODE (result_rtl) == REG)
3107 int i;
3108 unsigned HOST_WIDE_INT bitpos, xbitpos;
3109 unsigned HOST_WIDE_INT padding_correction = 0;
3110 unsigned HOST_WIDE_INT bytes
3111 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3112 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3113 unsigned int bitsize
3114 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3115 rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
3116 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3117 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3118 enum machine_mode tmpmode, result_reg_mode;
3120 if (bytes == 0)
3122 expand_null_return ();
3123 return;
3126 /* If the structure doesn't take up a whole number of words, see
3127 whether the register value should be padded on the left or on
3128 the right. Set PADDING_CORRECTION to the number of padding
3129 bits needed on the left side.
3131 In most ABIs, the structure will be returned at the least
3132 significant end of the register, which translates to right padding
3133 on little-endian targets and left padding on big-endian targets.
3134 The opposite holds if the structure is returned at the most
3135 significant end of the register. */
3136 if (bytes % UNITS_PER_WORD != 0
3137 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
3138 ? !BYTES_BIG_ENDIAN
3139 : BYTES_BIG_ENDIAN))
3140 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3141 * BITS_PER_UNIT));
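/* Illustrative example: a 6-byte structure on a 32-bit big-endian target,
   returned at the least significant end, gets
   padding_correction = 32 - (6 % 4) * 8 = 16 bits of left padding.  */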
3143 /* Copy the structure BITSIZE bits at a time. */
3144 for (bitpos = 0, xbitpos = padding_correction;
3145 bitpos < bytes * BITS_PER_UNIT;
3146 bitpos += bitsize, xbitpos += bitsize)
3148 /* We need a new destination pseudo each time xbitpos is
3149 on a word boundary and when xbitpos == padding_correction
3150 (the first time through). */
3151 if (xbitpos % BITS_PER_WORD == 0
3152 || xbitpos == padding_correction)
3154 /* Generate an appropriate register. */
3155 dst = gen_reg_rtx (word_mode);
3156 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3158 /* Clear the destination before we move anything into it. */
3159 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
3162 /* We need a new source operand each time bitpos is on a word
3163 boundary. */
3164 if (bitpos % BITS_PER_WORD == 0)
3165 src = operand_subword_force (result_val,
3166 bitpos / BITS_PER_WORD,
3167 BLKmode);
3169 /* Use bitpos for the source extraction (left justified) and
3170 xbitpos for the destination store (right justified). */
3171 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3172 extract_bit_field (src, bitsize,
3173 bitpos % BITS_PER_WORD, 1,
3174 NULL_RTX, word_mode, word_mode,
3175 BITS_PER_WORD),
3176 BITS_PER_WORD);
3179 tmpmode = GET_MODE (result_rtl);
3180 if (tmpmode == BLKmode)
3182 /* Find the smallest integer mode large enough to hold the
3183 entire structure and use that mode instead of BLKmode
3184 on the USE insn for the return register. */
3185 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3186 tmpmode != VOIDmode;
3187 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3188 /* Have we found a large enough mode? */
3189 if (GET_MODE_SIZE (tmpmode) >= bytes)
3190 break;
3192 /* No suitable mode found. */
3193 if (tmpmode == VOIDmode)
3194 abort ();
3196 PUT_MODE (result_rtl, tmpmode);
3199 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3200 result_reg_mode = word_mode;
3201 else
3202 result_reg_mode = tmpmode;
3203 result_reg = gen_reg_rtx (result_reg_mode);
3205 emit_queue ();
3206 for (i = 0; i < n_regs; i++)
3207 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3208 result_pseudos[i]);
3210 if (tmpmode != result_reg_mode)
3211 result_reg = gen_lowpart (tmpmode, result_reg);
3213 expand_value_return (result_reg);
3215 else if (retval_rhs != 0
3216 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3217 && (GET_CODE (result_rtl) == REG
3218 || (GET_CODE (result_rtl) == PARALLEL)))
3220 /* Calculate the return value into a temporary (usually a pseudo
3221 reg). */
3222 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3223 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3225 val = assign_temp (nt, 0, 0, 1);
3226 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3227 val = force_not_mem (val);
3228 emit_queue ();
3229 /* Return the calculated value, doing cleanups first. */
3230 expand_value_return (shift_return_value (val));
3232 else
3234 /* No cleanups or no hard reg used;
3235 calculate value into hard return reg. */
3236 expand_expr (retval, const0_rtx, VOIDmode, 0);
3237 emit_queue ();
3238 expand_value_return (result_rtl);
3242 /* Attempt to optimize a potential tail recursion call into a goto.
3243 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3244 where to place the jump to the tail recursion label.
3246 Return TRUE if the call was optimized into a goto. */
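/* Illustrative example: in a function whose body ends with
     return f (n - 1);
   a self-call in tail position may be rewritten as assignments of the
   actual arguments to the formal parameters followed by a jump back to
   tail_recursion_label, instead of emitting a real call.  */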
3249 optimize_tail_recursion (tree arguments, rtx last_insn)
3251 /* Finish checking validity, and if valid emit code to set the
3252 argument variables for the new call. */
3253 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3255 if (tail_recursion_label == 0)
3257 tail_recursion_label = gen_label_rtx ();
3258 emit_label_after (tail_recursion_label,
3259 tail_recursion_reentry);
3261 emit_queue ();
3262 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3263 emit_barrier ();
3264 return 1;
3266 return 0;
3269 /* Emit code to alter this function's formal parms for a tail-recursive call.
3270 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3271 FORMALS is the chain of decls of formals.
3272 Return 1 if this can be done;
3273 otherwise return 0 and do not emit any code. */
3275 static int
3276 tail_recursion_args (tree actuals, tree formals)
3278 tree a = actuals, f = formals;
3279 int i;
3280 rtx *argvec;
3282 /* Check that number and types of actuals are compatible
3283 with the formals. This is not always true in valid C code.
3284 Also check that no formal needs to be addressable
3285 and that all formals are scalars. */
3287 /* Also count the args. */
3289 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3291 if (!lang_hooks.types_compatible_p (TREE_TYPE (TREE_VALUE (a)),
3292 TREE_TYPE (f)))
3293 return 0;
3294 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3295 return 0;
3297 if (a != 0 || f != 0)
3298 return 0;
3300 /* Compute all the actuals. */
3302 argvec = alloca (i * sizeof (rtx));
3304 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3305 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3307 /* Find which actual values refer to current values of previous formals.
3308 Copy each of them now, before any formal is changed. */
3310 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3312 int copy = 0;
3313 int j;
3314 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3315 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3317 copy = 1;
3318 break;
3320 if (copy)
3321 argvec[i] = copy_to_reg (argvec[i]);
3324 /* Store the values of the actuals into the formals. */
3326 for (f = formals, a = actuals, i = 0; f;
3327 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3329 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3330 emit_move_insn (DECL_RTL (f), argvec[i]);
3331 else
3333 rtx tmp = argvec[i];
3334 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_VALUE (a)));
3335 promote_mode(TREE_TYPE (TREE_VALUE (a)), GET_MODE (tmp),
3336 &unsignedp, 0);
3337 if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
3339 tmp = gen_reg_rtx (DECL_MODE (f));
3340 convert_move (tmp, argvec[i], unsignedp);
3342 convert_move (DECL_RTL (f), tmp, unsignedp);
3346 free_temp_slots ();
3347 return 1;
3350 /* Generate the RTL code for entering a binding contour.
3351 The variables are declared one by one, by calls to `expand_decl'.
3353 FLAGS is a bitwise or of the following flags:
3355 1 - Nonzero if this construct should be visible to
3356 `exit_something'.
3358 2 - Nonzero if this contour does not require a
3359 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3360 language-independent code should set this flag because they
3361 will not create corresponding BLOCK nodes. (There should be
3362 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3363 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3364 when expand_end_bindings is called.
3366 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3367 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3368 note. */
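/* For example, expand_start_bindings (2) (see expand_start_target_temps
   below) opens a contour that is invisible to `exit_something' and that
   emits no NOTE_INSN_BLOCK_BEG note.  */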
3370 void
3371 expand_start_bindings_and_block (int flags, tree block)
3373 struct nesting *thisblock = ALLOC_NESTING ();
3374 rtx note;
3375 int exit_flag = ((flags & 1) != 0);
3376 int block_flag = ((flags & 2) == 0);
3378 /* If a BLOCK is supplied, then the caller should be requesting a
3379 NOTE_INSN_BLOCK_BEG note. */
3380 if (!block_flag && block)
3381 abort ();
3383 /* Create a note to mark the beginning of the block. */
3384 if (block_flag && !cfun->dont_emit_block_notes)
3386 note = emit_note (NOTE_INSN_BLOCK_BEG);
3387 NOTE_BLOCK (note) = block;
3389 else
3390 note = emit_note (NOTE_INSN_DELETED);
3392 /* Make an entry on block_stack for the block we are entering. */
3394 thisblock->desc = BLOCK_NESTING;
3395 thisblock->next = block_stack;
3396 thisblock->all = nesting_stack;
3397 thisblock->depth = ++nesting_depth;
3398 thisblock->data.block.stack_level = 0;
3399 thisblock->data.block.cleanups = 0;
3400 thisblock->data.block.exception_region = 0;
3401 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3403 thisblock->data.block.conditional_code = 0;
3404 thisblock->data.block.last_unconditional_cleanup = note;
3405 /* When we insert instructions after the last unconditional cleanup,
3406 we don't adjust last_insn. That means that a later add_insn will
3407 clobber the instructions we've just added. The easiest way to
3408 fix this is to just insert another instruction here, so that the
3409 instructions inserted after the last unconditional cleanup are
3410 never the last instruction. */
3411 emit_note (NOTE_INSN_DELETED);
3413 if (block_stack
3414 && !(block_stack->data.block.cleanups == NULL_TREE
3415 && block_stack->data.block.outer_cleanups == NULL_TREE))
3416 thisblock->data.block.outer_cleanups
3417 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3418 block_stack->data.block.outer_cleanups);
3419 else
3420 thisblock->data.block.outer_cleanups = 0;
3421 thisblock->data.block.label_chain = 0;
3422 thisblock->data.block.innermost_stack_block = stack_block_stack;
3423 thisblock->data.block.first_insn = note;
3424 thisblock->data.block.block_start_count = ++current_block_start_count;
3425 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3426 block_stack = thisblock;
3427 nesting_stack = thisblock;
3429 /* Make a new level for allocating stack slots. */
3430 push_temp_slots ();
3433 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3434 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3435 expand_expr are made. After we end the region, we know that all
3436 space for all temporaries that were created by TARGET_EXPRs will be
3437 destroyed and their space freed for reuse. */
3439 void
3440 expand_start_target_temps (void)
3442 /* This is so that even if the result is preserved, the space
3443 allocated will be freed, as we know that it is no longer in use. */
3444 push_temp_slots ();
3446 /* Start a new binding layer that will keep track of all cleanup
3447 actions to be performed. */
3448 expand_start_bindings (2);
3450 target_temp_slot_level = temp_slot_level;
3453 void
3454 expand_end_target_temps (void)
3456 expand_end_bindings (NULL_TREE, 0, 0);
3458 /* This is so that even if the result is preserved, the space
3459 allocated will be freed, as we know that it is no longer in use. */
3460 pop_temp_slots ();
3463 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
3464 in question represents the outermost pair of curly braces (i.e. the "body
3465 block") of a function or method.
3467 For any BLOCK node representing a "body block" of a function or method, the
3468 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3469 represents the outermost (function) scope for the function or method (i.e.
3470 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3471 *that* node in turn will point to the relevant FUNCTION_DECL node. */
3474 is_body_block (tree stmt)
3476 if (lang_hooks.no_body_blocks)
3477 return 0;
3479 if (TREE_CODE (stmt) == BLOCK)
3481 tree parent = BLOCK_SUPERCONTEXT (stmt);
3483 if (parent && TREE_CODE (parent) == BLOCK)
3485 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3487 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3488 return 1;
3492 return 0;
3495 /* True if we are currently emitting insns in an area of output code
3496 that is controlled by a conditional expression. This is used by
3497 the cleanup handling code to generate conditional cleanup actions. */
3500 conditional_context (void)
3502 return block_stack && block_stack->data.block.conditional_code;
3505 /* Return an opaque pointer to the current nesting level, so frontend code
3506 can check its own sanity. */
3508 struct nesting *
3509 current_nesting_level (void)
3511 return cfun ? block_stack : 0;
3514 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3515 handler. */
3516 static void
3517 expand_nl_goto_receiver (void)
3519 /* Clobber the FP when we get here, so we have to make sure it's
3520 marked as used by this function. */
3521 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
3523 /* Mark the static chain as clobbered here so life information
3524 doesn't get messed up for it. */
3525 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
3527 #ifdef HAVE_nonlocal_goto
3528 if (! HAVE_nonlocal_goto)
3529 #endif
3530 /* First adjust our frame pointer to its actual value. It was
3531 previously set to the start of the virtual area corresponding to
3532 the stacked variables when we branched here and now needs to be
3533 adjusted to the actual hardware fp value.
3535 Assignments to virtual registers are converted by
3536 instantiate_virtual_regs into the corresponding assignment
3537 to the underlying register (fp in this case) that makes
3538 the original assignment true.
3539 So the following insn will actually be
3540 decrementing fp by STARTING_FRAME_OFFSET. */
3541 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3543 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3544 if (fixed_regs[ARG_POINTER_REGNUM])
3546 #ifdef ELIMINABLE_REGS
3547 /* If the argument pointer can be eliminated in favor of the
3548 frame pointer, we don't need to restore it. We assume here
3549 that if such an elimination is present, it can always be used.
3550 This is the case on all known machines; if we don't make this
3551 assumption, we do unnecessary saving on many machines. */
3552 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3553 size_t i;
3555 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3556 if (elim_regs[i].from == ARG_POINTER_REGNUM
3557 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3558 break;
3560 if (i == ARRAY_SIZE (elim_regs))
3561 #endif
3563 /* Now restore our arg pointer from the address at which it
3564 was saved in our stack frame. */
3565 emit_move_insn (virtual_incoming_args_rtx,
3566 copy_to_reg (get_arg_pointer_save_area (cfun)));
3569 #endif
3571 #ifdef HAVE_nonlocal_goto_receiver
3572 if (HAVE_nonlocal_goto_receiver)
3573 emit_insn (gen_nonlocal_goto_receiver ());
3574 #endif
3576 /* @@@ This is a kludge. Not all machine descriptions define a blockage
3577 insn, but we must not allow the code we just generated to be reordered
3578 by scheduling. Specifically, the update of the frame pointer must
3579 happen immediately, not later. So emit an ASM_INPUT to act as blockage
3580 insn. */
3581 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
3584 /* Warn about any unused VARS (which may contain nodes other than
3585 VAR_DECLs, but such nodes are ignored). The nodes are connected
3586 via the TREE_CHAIN field. */
3588 void
3589 warn_about_unused_variables (tree vars)
3591 tree decl;
3593 if (warn_unused_variable)
3594 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3595 if (TREE_CODE (decl) == VAR_DECL
3596 && ! TREE_USED (decl)
3597 && ! DECL_IN_SYSTEM_HEADER (decl)
3598 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3599 warning ("%Junused variable '%D'", decl, decl);
3602 /* Generate RTL code to terminate a binding contour.
3604 VARS is the chain of VAR_DECL nodes for the variables bound in this
3605 contour. There may actually be other nodes in this chain, but any
3606 nodes other than VAR_DECLS are ignored.
3608 MARK_ENDS is nonzero if we should put a note at the beginning
3609 and end of this binding contour.
3611 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
3612 zero if we can jump into this contour only if it does not have a saved
3613 stack level, and negative if we are not to check for invalid use of
3614 labels (because the front end does that). */
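/* Illustrative note, not part of the original source: a block that
   declares a variable-sized object has a saved stack level, so with
   DONT_JUMP_IN == 0 its labels are marked DECL_TOO_LATE below and a
   goto into the block from outside is diagnosed, while jumping into an
   ordinary block remains legal.  */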
3616 void
3617 expand_end_bindings (tree vars, int mark_ends, int dont_jump_in)
3619 struct nesting *thisblock = block_stack;
3621 /* If any of the variables in this scope were not used, warn the
3622 user. */
3623 warn_about_unused_variables (vars);
3625 if (thisblock->exit_label)
3627 do_pending_stack_adjust ();
3628 emit_label (thisblock->exit_label);
3631 /* Don't allow jumping into a block that has a stack level.
3632 Cleanups are allowed, though. */
3633 if (dont_jump_in > 0
3634 || (dont_jump_in == 0 && thisblock->data.block.stack_level != 0))
3636 struct label_chain *chain;
3638 /* Any labels in this block are no longer valid to go to.
3639 Mark them to cause an error message. */
3640 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3642 DECL_TOO_LATE (chain->label) = 1;
3643 /* If any goto without a fixup came to this label,
3644 that must be an error, because gotos without fixups
3645 come from outside all saved stack-levels. */
3646 if (TREE_ADDRESSABLE (chain->label))
3647 error ("%Jlabel '%D' used before containing binding contour",
3648 chain->label, chain->label);
3652 /* Restore stack level in effect before the block
3653 (only if variable-size objects allocated). */
3654 /* Perform any cleanups associated with the block. */
3656 if (thisblock->data.block.stack_level != 0
3657 || thisblock->data.block.cleanups != 0)
3659 int reachable;
3660 rtx insn;
3662 /* Don't let cleanups affect ({...}) constructs. */
3663 int old_expr_stmts_for_value = expr_stmts_for_value;
3664 rtx old_last_expr_value = last_expr_value;
3665 rtx old_last_expr_alt_rtl = last_expr_alt_rtl;
3666 tree old_last_expr_type = last_expr_type;
3667 expr_stmts_for_value = 0;
3669 /* Only clean up here if this point can actually be reached. */
3670 insn = get_last_insn ();
3671 if (GET_CODE (insn) == NOTE)
3672 insn = prev_nonnote_insn (insn);
3673 reachable = (! insn || GET_CODE (insn) != BARRIER);
3675 /* Do the cleanups. */
3676 expand_cleanups (thisblock->data.block.cleanups, 0, reachable);
3677 if (reachable)
3678 do_pending_stack_adjust ();
3680 expr_stmts_for_value = old_expr_stmts_for_value;
3681 last_expr_value = old_last_expr_value;
3682 last_expr_alt_rtl = old_last_expr_alt_rtl;
3683 last_expr_type = old_last_expr_type;
3685 /* Restore the stack level. */
3687 if (reachable && thisblock->data.block.stack_level != 0)
3689 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3690 thisblock->data.block.stack_level, NULL_RTX);
3691 if (cfun->nonlocal_goto_save_area)
3692 update_nonlocal_goto_save_area ();
3695 /* Any gotos out of this block must also do these things.
3696 Also report any gotos with fixups that came to labels in this
3697 level. */
3698 fixup_gotos (thisblock,
3699 thisblock->data.block.stack_level,
3700 thisblock->data.block.cleanups,
3701 thisblock->data.block.first_insn,
3702 dont_jump_in);
3705 /* Mark the beginning and end of the scope if requested.
3706 We do this now, after running cleanups on the variables
3707 just going out of scope, so they are in scope for their cleanups. */
3709 if (mark_ends && !cfun->dont_emit_block_notes)
3711 rtx note = emit_note (NOTE_INSN_BLOCK_END);
3712 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3714 else
3715 /* Get rid of the beginning-mark if we don't make an end-mark. */
3716 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3718 /* Restore the temporary level of TARGET_EXPRs. */
3719 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3721 /* Restore block_stack level for containing block. */
3723 stack_block_stack = thisblock->data.block.innermost_stack_block;
3724 POPSTACK (block_stack);
3726 /* Pop the stack slot nesting and free any slots at this level. */
3727 pop_temp_slots ();
3730 /* Generate code to save the stack pointer at the start of the current block
3731 and set up to restore it on exit. */
3733 void
3734 save_stack_pointer (void)
3736 struct nesting *thisblock = block_stack;
3738 if (thisblock->data.block.stack_level == 0)
3740 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3741 &thisblock->data.block.stack_level,
3742 thisblock->data.block.first_insn);
3743 stack_block_stack = thisblock;
3747 /* Generate RTL for the automatic variable declaration DECL.
3748 (Other kinds of declarations are simply ignored if seen here.) */
3750 void
3751 expand_decl (tree decl)
3753 tree type;
3755 type = TREE_TYPE (decl);
3757 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3758 type in case this node is used in a reference. */
3759 if (TREE_CODE (decl) == CONST_DECL)
3761 DECL_MODE (decl) = TYPE_MODE (type);
3762 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3763 DECL_SIZE (decl) = TYPE_SIZE (type);
3764 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3765 return;
3768 /* Otherwise, only automatic variables need any expansion done. Static and
3769 external variables, and external functions, will be handled by
3770 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3771 nothing. PARM_DECLs are handled in `assign_parms'. */
3772 if (TREE_CODE (decl) != VAR_DECL)
3773 return;
3775 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3776 return;
3778 /* Create the RTL representation for the variable. */
3780 if (type == error_mark_node)
3781 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3783 else if (DECL_SIZE (decl) == 0)
3784 /* Variable with incomplete type. */
3786 rtx x;
3787 if (DECL_INITIAL (decl) == 0)
3788 /* Error message was already done; now avoid a crash. */
3789 x = gen_rtx_MEM (BLKmode, const0_rtx);
3790 else
3791 /* An initializer is going to decide the size of this array.
3792 Until we know the size, represent its address with a reg. */
3793 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3795 set_mem_attributes (x, decl, 1);
3796 SET_DECL_RTL (decl, x);
3798 else if (DECL_MODE (decl) != BLKmode
3799 /* If -ffloat-store, don't put explicit float vars
3800 into regs. */
3801 && !(flag_float_store
3802 && TREE_CODE (type) == REAL_TYPE)
3803 && ! TREE_THIS_VOLATILE (decl)
3804 && ! DECL_NONLOCAL (decl)
3805 && (DECL_REGISTER (decl) || DECL_ARTIFICIAL (decl) || optimize))
3807 /* Automatic variable that can go in a register. */
3808 int unsignedp = TYPE_UNSIGNED (type);
3809 enum machine_mode reg_mode
3810 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3812 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3814 if (!DECL_ARTIFICIAL (decl))
3815 mark_user_reg (DECL_RTL (decl));
3817 if (POINTER_TYPE_P (type))
3818 mark_reg_pointer (DECL_RTL (decl),
3819 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3821 maybe_set_unchanging (DECL_RTL (decl), decl);
3823 /* If something wants our address, try to use ADDRESSOF. */
3824 if (TREE_ADDRESSABLE (decl))
3825 put_var_into_stack (decl, /*rescan=*/false);
3828 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3829 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3830 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3831 STACK_CHECK_MAX_VAR_SIZE)))
3833 /* Variable of fixed size that goes on the stack. */
3834 rtx oldaddr = 0;
3835 rtx addr;
3836 rtx x;
3838 /* If we previously made RTL for this decl, it must be an array
3839 whose size was determined by the initializer.
3840 The old address was a register; set that register now
3841 to the proper address. */
3842 if (DECL_RTL_SET_P (decl))
3844 if (GET_CODE (DECL_RTL (decl)) != MEM
3845 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3846 abort ();
3847 oldaddr = XEXP (DECL_RTL (decl), 0);
3850 /* Set alignment we actually gave this decl. */
3851 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3852 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3853 DECL_USER_ALIGN (decl) = 0;
3855 x = assign_temp (decl, 1, 1, 1);
3856 set_mem_attributes (x, decl, 1);
3857 SET_DECL_RTL (decl, x);
3859 if (oldaddr)
3861 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3862 if (addr != oldaddr)
3863 emit_move_insn (oldaddr, addr);
3866 else
3867 /* Dynamic-size object: must push space on the stack. */
3869 rtx address, size, x;
3871 /* Record the stack pointer on entry to block, if we have
3872 not already done so. */
3873 do_pending_stack_adjust ();
3874 save_stack_pointer ();
3876 /* Compute the variable's size, in bytes. This will expand any
3877 needed SAVE_EXPRs for the first time. */
3878 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3879 free_temp_slots ();
3881 /* Allocate space on the stack for the variable. Note that
3882 DECL_ALIGN says how the variable is to be aligned and we
3883 cannot use it to conclude anything about the alignment of
3884 the size. */
3885 address = allocate_dynamic_stack_space (size, NULL_RTX,
3886 TYPE_ALIGN (TREE_TYPE (decl)));
3888 /* Reference the variable indirectly through that rtx. */
3889 x = gen_rtx_MEM (DECL_MODE (decl), address);
3890 set_mem_attributes (x, decl, 1);
3891 SET_DECL_RTL (decl, x);
3894 /* Indicate the alignment we actually gave this variable. */
3895 #ifdef STACK_BOUNDARY
3896 DECL_ALIGN (decl) = STACK_BOUNDARY;
3897 #else
3898 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3899 #endif
3900 DECL_USER_ALIGN (decl) = 0;
3904 /* Emit code to allocate T_SIZE bytes of dynamic stack space for ALLOC. */
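/* Illustrative note, not part of the original source: ALLOC must be the
   address of a VAR_DECL (checked below), so for a C99 VLA such as
   `char buf[n];' a caller would pass the equivalent of &buf together
   with the size-in-bytes expression; the register already referenced by
   XEXP (DECL_RTL (var), 0) is then set to the newly allocated space.  */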
3905 void
3906 expand_stack_alloc (tree alloc, tree t_size)
3908 rtx address, dest, size;
3909 tree var, type;
3911 if (TREE_CODE (alloc) != ADDR_EXPR)
3912 abort ();
3913 var = TREE_OPERAND (alloc, 0);
3914 if (TREE_CODE (var) != VAR_DECL)
3915 abort ();
3917 type = TREE_TYPE (var);
3919 /* In function-at-a-time mode, variable_size doesn't expand this,
3920 so do it now. */
3921 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3922 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3923 const0_rtx, VOIDmode, 0);
3925 /* Compute the variable's size, in bytes. */
3926 size = expand_expr (t_size, NULL_RTX, VOIDmode, 0);
3927 free_temp_slots ();
3929 /* Allocate space on the stack for the variable. */
3930 address = XEXP (DECL_RTL (var), 0);
3931 dest = allocate_dynamic_stack_space (size, address, TYPE_ALIGN (type));
3932 if (dest != address)
3933 emit_move_insn (address, dest);
3935 /* Indicate the alignment we actually gave this variable. */
3936 #ifdef STACK_BOUNDARY
3937 DECL_ALIGN (var) = STACK_BOUNDARY;
3938 #else
3939 DECL_ALIGN (var) = BIGGEST_ALIGNMENT;
3940 #endif
3941 DECL_USER_ALIGN (var) = 0;
3944 /* Emit code to save the current value of stack. */
3945 rtx
3946 expand_stack_save (void)
3948 rtx ret = NULL_RTX;
3950 do_pending_stack_adjust ();
3951 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
3952 return ret;
3955 /* Emit code to restore the current value of stack. */
3956 void
3957 expand_stack_restore (tree var)
3959 rtx sa = DECL_RTL (var);
3961 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
3964 /* Emit code to perform the initialization of a declaration DECL. */
3966 void
3967 expand_decl_init (tree decl)
3969 int was_used = TREE_USED (decl);
3971 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
3972 for static decls. */
3973 if (TREE_CODE (decl) == CONST_DECL
3974 || TREE_STATIC (decl))
3975 return;
3977 /* Compute and store the initial value now. */
3979 push_temp_slots ();
3981 if (DECL_INITIAL (decl) == error_mark_node)
3983 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3985 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3986 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3987 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3989 emit_queue ();
3991 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3993 emit_line_note (DECL_SOURCE_LOCATION (decl));
3994 expand_assignment (decl, DECL_INITIAL (decl), 0);
3995 emit_queue ();
3998 /* Don't let the initialization count as "using" the variable. */
3999 TREE_USED (decl) = was_used;
4001 /* Free any temporaries we made while initializing the decl. */
4002 preserve_temp_slots (NULL_RTX);
4003 free_temp_slots ();
4004 pop_temp_slots ();
4007 /* CLEANUP is an expression to be executed at exit from this binding contour;
4008 for example, in C++, it might call the destructor for this variable.
4010 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4011 CLEANUP multiple times, and have the correct semantics. This
4012 happens in exception handling, for gotos, returns, breaks that
4013 leave the current scope.
4015 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4016 that is not associated with any particular variable. */
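/* Illustrative example, not part of the original source: for C++ code
   such as

       { Lock guard (m); ... }

   the front end would register the destructor as a cleanup, roughly
   expand_decl_cleanup (guard_decl, <call to guard.~Lock ()>), so the
   call is re-expanded on every path leaving the block: normal exit,
   goto, return, or exception.  */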
4018 int
4019 expand_decl_cleanup (tree decl, tree cleanup)
4021 struct nesting *thisblock;
4023 /* Error if we are not in any block. */
4024 if (cfun == 0 || block_stack == 0)
4025 return 0;
4027 thisblock = block_stack;
4029 /* Record the cleanup if there is one. */
4031 if (cleanup != 0)
4033 tree t;
4034 rtx seq;
4035 tree *cleanups = &thisblock->data.block.cleanups;
4036 int cond_context = conditional_context ();
4038 if (cond_context)
4040 rtx flag = gen_reg_rtx (word_mode);
4041 rtx set_flag_0;
4042 tree cond;
4044 start_sequence ();
4045 emit_move_insn (flag, const0_rtx);
4046 set_flag_0 = get_insns ();
4047 end_sequence ();
4049 thisblock->data.block.last_unconditional_cleanup
4050 = emit_insn_after (set_flag_0,
4051 thisblock->data.block.last_unconditional_cleanup);
4053 emit_move_insn (flag, const1_rtx);
4055 cond = build_decl (VAR_DECL, NULL_TREE,
4056 lang_hooks.types.type_for_mode (word_mode, 1));
4057 SET_DECL_RTL (cond, flag);
4059 /* Conditionalize the cleanup. */
4060 cleanup = build (COND_EXPR, void_type_node,
4061 lang_hooks.truthvalue_conversion (cond),
4062 cleanup, integer_zero_node);
4063 cleanup = fold (cleanup);
4065 cleanups = &thisblock->data.block.cleanups;
4068 cleanup = unsave_expr (cleanup);
4070 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4072 if (! cond_context)
4073 /* If this block has a cleanup, it belongs in stack_block_stack. */
4074 stack_block_stack = thisblock;
4076 if (cond_context)
4078 start_sequence ();
4081 if (! using_eh_for_cleanups_p)
4082 TREE_ADDRESSABLE (t) = 1;
4083 else
4084 expand_eh_region_start ();
4086 if (cond_context)
4088 seq = get_insns ();
4089 end_sequence ();
4090 if (seq)
4091 thisblock->data.block.last_unconditional_cleanup
4092 = emit_insn_after (seq,
4093 thisblock->data.block.last_unconditional_cleanup);
4095 else
4097 thisblock->data.block.last_unconditional_cleanup
4098 = get_last_insn ();
4099 /* When we insert instructions after the last unconditional cleanup,
4100 we don't adjust last_insn. That means that a later add_insn will
4101 clobber the instructions we've just added. The easiest way to
4102 fix this is to just insert another instruction here, so that the
4103 instructions inserted after the last unconditional cleanup are
4104 never the last instruction. */
4105 emit_note (NOTE_INSN_DELETED);
4108 return 1;
4111 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
4112 is thrown. */
4114 int
4115 expand_decl_cleanup_eh (tree decl, tree cleanup, int eh_only)
4117 int ret = expand_decl_cleanup (decl, cleanup);
4118 if (cleanup && ret)
4120 tree node = block_stack->data.block.cleanups;
4121 CLEANUP_EH_ONLY (node) = eh_only;
4123 return ret;
4126 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4127 DECL_ELTS is the list of elements that belong to DECL's type.
4128 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4130 void
4131 expand_anon_union_decl (tree decl, tree cleanup, tree decl_elts)
4133 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4134 rtx x;
4135 tree t;
4137 /* If any of the elements are addressable, so is the entire union. */
4138 for (t = decl_elts; t; t = TREE_CHAIN (t))
4139 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4141 TREE_ADDRESSABLE (decl) = 1;
4142 break;
4145 expand_decl (decl);
4146 expand_decl_cleanup (decl, cleanup);
4147 x = DECL_RTL (decl);
4149 /* Go through the elements, assigning RTL to each. */
4150 for (t = decl_elts; t; t = TREE_CHAIN (t))
4152 tree decl_elt = TREE_VALUE (t);
4153 tree cleanup_elt = TREE_PURPOSE (t);
4154 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4156 /* If any of the elements are used, so is the entire
4157 union. */
4158 if (TREE_USED (decl_elt))
4159 TREE_USED (decl) = 1;
4161 /* Propagate the union's alignment to the elements. */
4162 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4163 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4165 /* If the element has BLKmode and the union doesn't, the union is
4166 aligned such that the element doesn't need to have BLKmode, so
4167 change the element's mode to the appropriate one for its size. */
4168 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4169 DECL_MODE (decl_elt) = mode
4170 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4172 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4173 instead create a new MEM rtx with the proper mode. */
4174 if (GET_CODE (x) == MEM)
4176 if (mode == GET_MODE (x))
4177 SET_DECL_RTL (decl_elt, x);
4178 else
4179 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4181 else if (GET_CODE (x) == REG)
4183 if (mode == GET_MODE (x))
4184 SET_DECL_RTL (decl_elt, x);
4185 else
4186 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4188 else
4189 abort ();
4191 /* Record the cleanup if there is one. */
4193 if (cleanup != 0)
4194 thisblock->data.block.cleanups
4195 = tree_cons (decl_elt, cleanup_elt,
4196 thisblock->data.block.cleanups);
4200 /* Expand a list of cleanups LIST.
4201 Elements may be expressions or may be nested lists.
4203 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
4204 goto and handle protection regions specially in that case.
4206 If REACHABLE, we emit code, otherwise just inform the exception handling
4207 code about this finalization. */
4209 static void
4210 expand_cleanups (tree list, int in_fixup, int reachable)
4212 tree tail;
4213 for (tail = list; tail; tail = TREE_CHAIN (tail))
4214 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4215 expand_cleanups (TREE_VALUE (tail), in_fixup, reachable);
4216 else
4218 if (! in_fixup && using_eh_for_cleanups_p)
4219 expand_eh_region_end_cleanup (TREE_VALUE (tail));
4221 if (reachable && !CLEANUP_EH_ONLY (tail))
4223 /* Cleanups may be run multiple times. For example,
4224 when exiting a binding contour, we expand the
4225 cleanups associated with that contour. When a goto
4226 within that binding contour has a target outside that
4227 contour, it will expand all cleanups from its scope to
4228 the target. Though the cleanups are expanded multiple
4229 times, the control paths are non-overlapping so the
4230 cleanups will not be executed twice. */
4232 /* We may need to protect from outer cleanups. */
4233 if (in_fixup && using_eh_for_cleanups_p)
4235 expand_eh_region_start ();
4237 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4239 expand_eh_region_end_fixup (TREE_VALUE (tail));
4241 else
4242 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4244 free_temp_slots ();
4249 /* Mark the context we are emitting RTL for as a conditional
4250 context, so that any cleanup actions we register with
4251 expand_decl_init will be properly conditionalized when those
4252 cleanup actions are later performed. Must be called before any
4253 expression (tree) is expanded that is within a conditional context. */
4255 void
4256 start_cleanup_deferral (void)
4258 /* block_stack can be NULL if we are inside the parameter list. It is
4259 OK to do nothing, because cleanups aren't possible here. */
4260 if (block_stack)
4261 ++block_stack->data.block.conditional_code;
4264 /* Mark the end of a conditional region of code. Because cleanup
4265 deferrals may be nested, we may still be in a conditional region
4266 after we end the currently deferred cleanups; only after we end all
4267 deferred cleanups are we back in unconditional code. */
4269 void
4270 end_cleanup_deferral (void)
4272 /* block_stack can be NULL if we are inside the parameter list. It is
4273 OK to do nothing, because cleanups aren't possible here. */
4274 if (block_stack)
4275 --block_stack->data.block.conditional_code;
4278 tree
4279 last_cleanup_this_contour (void)
4281 if (block_stack == 0)
4282 return 0;
4284 return block_stack->data.block.cleanups;
4288 /* Return nonzero if any containing block has a stack level or
4289 cleanups. */
4291 int
4292 containing_blocks_have_cleanups_or_stack_level (void)
4294 struct nesting *block;
4296 for (block = block_stack; block; block = block->next)
4297 if (block->data.block.stack_level != 0
4298 || block->data.block.cleanups != 0)
4299 return 1;
4301 return 0;
4304 /* Return 1 if there are any pending cleanups at this point.
4305 Check the current contour as well as contours that enclose
4306 the current contour. */
4308 int
4309 any_pending_cleanups (void)
4311 struct nesting *block;
4313 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4314 return 0;
4316 if (block_stack->data.block.cleanups != NULL)
4317 return 1;
4319 if (block_stack->data.block.outer_cleanups == 0)
4320 return 0;
4322 for (block = block_stack->next; block; block = block->next)
4323 if (block->data.block.cleanups != 0)
4324 return 1;
4326 return 0;
4329 /* Enter a case (Pascal) or switch (C) statement.
4330 Push a block onto case_stack and nesting_stack
4331 to accumulate the case-labels that are seen
4332 and to record the labels generated for the statement.
4334 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4335 Otherwise, this construct is transparent for `exit_something'.
4337 EXPR is the index-expression to be dispatched on.
4338 TYPE is its nominal type. We could simply convert EXPR to this type,
4339 but instead we take short cuts. */
4341 void
4342 expand_start_case (int exit_flag, tree expr, tree type,
4343 const char *printname)
4345 struct nesting *thiscase = ALLOC_NESTING ();
4347 /* Make an entry on case_stack for the case we are entering. */
4349 thiscase->desc = CASE_NESTING;
4350 thiscase->next = case_stack;
4351 thiscase->all = nesting_stack;
4352 thiscase->depth = ++nesting_depth;
4353 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4354 thiscase->data.case_stmt.case_list = 0;
4355 thiscase->data.case_stmt.index_expr = expr;
4356 thiscase->data.case_stmt.nominal_type = type;
4357 thiscase->data.case_stmt.default_label = 0;
4358 thiscase->data.case_stmt.printname = printname;
4359 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4360 case_stack = thiscase;
4361 nesting_stack = thiscase;
4363 do_pending_stack_adjust ();
4364 emit_queue ();
4366 /* Make sure case_stmt.start points to something that won't
4367 need any transformation before expand_end_case. */
4368 if (GET_CODE (get_last_insn ()) != NOTE)
4369 emit_note (NOTE_INSN_DELETED);
4371 thiscase->data.case_stmt.start = get_last_insn ();
4373 start_cleanup_deferral ();
4376 static void
4377 check_seenlabel (void)
4379 /* If this is the first label, warn if any insns have been emitted. */
4380 if (case_stack->data.case_stmt.line_number_status >= 0)
4382 rtx insn;
4384 restore_line_number_status
4385 (case_stack->data.case_stmt.line_number_status);
4386 case_stack->data.case_stmt.line_number_status = -1;
4388 for (insn = case_stack->data.case_stmt.start;
4389 insn;
4390 insn = NEXT_INSN (insn))
4392 if (GET_CODE (insn) == CODE_LABEL)
4393 break;
4394 if (GET_CODE (insn) != NOTE
4395 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
5397 do
5398 insn = PREV_INSN (insn);
4399 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4401 /* If insn is zero, then there must have been a syntax error. */
4402 if (insn)
4404 location_t locus;
4405 locus.file = NOTE_SOURCE_FILE (insn);
4406 locus.line = NOTE_LINE_NUMBER (insn);
4407 warning ("%Hunreachable code at beginning of %s", &locus,
4408 case_stack->data.case_stmt.printname);
4410 break;
4416 /* Accumulate one case or default label inside a case or switch statement.
4417 VALUE is the value of the case (a null pointer, for a default label).
4418 The function CONVERTER, when applied to arguments T and V,
4419 converts the value V to the type T.
4421 If not currently inside a case or switch statement, return 1 and do
4422 nothing. The caller will print a language-specific error message.
4423 If VALUE is a duplicate or overlaps, return 2 and do nothing
4424 except store the (first) duplicate node in *DUPLICATE.
4425 If VALUE is out of range, return 3 and do nothing.
4426 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4427 Return 0 on success.
4429 Extended to handle range statements. */
4431 int
4432 pushcase (tree value, tree (*converter) (tree, tree), tree label,
4433 tree *duplicate)
4435 tree index_type;
4436 tree nominal_type;
4438 /* Fail if not inside a real case statement. */
4439 if (! (case_stack && case_stack->data.case_stmt.start))
4440 return 1;
4442 if (stack_block_stack
4443 && stack_block_stack->depth > case_stack->depth)
4444 return 5;
4446 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4447 nominal_type = case_stack->data.case_stmt.nominal_type;
4449 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4450 if (index_type == error_mark_node)
4451 return 0;
4453 /* Convert VALUE to the type in which the comparisons are nominally done. */
4454 if (value != 0)
4455 value = (*converter) (nominal_type, value);
4457 check_seenlabel ();
4459 /* Fail if this value is out of range for the actual type of the index
4460 (which may be narrower than NOMINAL_TYPE). */
4461 if (value != 0
4462 && (TREE_CONSTANT_OVERFLOW (value)
4463 || ! int_fits_type_p (value, index_type)))
4464 return 3;
4466 return add_case_node (value, value, label, duplicate, false);
4469 /* Like pushcase but this case applies to all values between VALUE1 and
4470 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4471 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4472 starts at VALUE1 and ends at the highest value of the index type.
4473 If both are NULL, this case applies to all values.
4475 The return value is the same as that of pushcase but there is one
4476 additional error code: 4 means the specified range was empty. */
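/* Illustrative example, not part of the original source: a front end
   implementing GNU-style case ranges could lower

       case 1 ... 5: do_something (); break;

   by calling pushcase_range with VALUE1 == 1 and VALUE2 == 5, while a
   NULL VALUE1 would make the range start at TYPE_MIN_VALUE of the
   index type, as described above.  */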
4478 int
4479 pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
4480 tree label, tree *duplicate)
4482 tree index_type;
4483 tree nominal_type;
4485 /* Fail if not inside a real case statement. */
4486 if (! (case_stack && case_stack->data.case_stmt.start))
4487 return 1;
4489 if (stack_block_stack
4490 && stack_block_stack->depth > case_stack->depth)
4491 return 5;
4493 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4494 nominal_type = case_stack->data.case_stmt.nominal_type;
4496 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4497 if (index_type == error_mark_node)
4498 return 0;
4500 check_seenlabel ();
4502 /* Convert VALUEs to type in which the comparisons are nominally done
4503 and replace any unspecified value with the corresponding bound. */
4504 if (value1 == 0)
4505 value1 = TYPE_MIN_VALUE (index_type);
4506 if (value2 == 0)
4507 value2 = TYPE_MAX_VALUE (index_type);
4509 /* Fail if the range is empty. Do this before any conversion since
4510 we want to allow out-of-range empty ranges. */
4511 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4512 return 4;
4514 /* If the max was unbounded, use the max of the nominal_type we are
4515 converting to. Do this after the < check above to suppress false
4516 positives. */
4517 if (value2 == 0)
4518 value2 = TYPE_MAX_VALUE (nominal_type);
4520 value1 = (*converter) (nominal_type, value1);
4521 value2 = (*converter) (nominal_type, value2);
4523 /* Fail if these values are out of range. */
4524 if (TREE_CONSTANT_OVERFLOW (value1)
4525 || ! int_fits_type_p (value1, index_type))
4526 return 3;
4528 if (TREE_CONSTANT_OVERFLOW (value2)
4529 || ! int_fits_type_p (value2, index_type))
4530 return 3;
4532 return add_case_node (value1, value2, label, duplicate, false);
4535 /* Do the actual insertion of a case label for pushcase and pushcase_range
4536 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4537 slowdown for large switch statements. */
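/* Illustrative note, not part of the original source: the tree is kept
   AVL-balanced.  Inserting the labels 1, 2 and 3 in that order first
   hangs 2 to the right of 1; adding 3 would over-weight the right side,
   so the L-rotation below promotes 2 to the root with 1 and 3 as its
   children.  */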
4539 static int
4540 add_case_node (tree low, tree high, tree label, tree *duplicate,
4541 bool dont_expand_label)
4543 struct case_node *p, **q, *r;
4545 /* If there's no HIGH value, then this is not a case range; it's
4546 just a simple case label. But that's just a degenerate case
4547 range. */
4548 if (!high)
4549 high = low;
4551 /* Handle default labels specially. */
4552 if (!high && !low)
4554 if (case_stack->data.case_stmt.default_label != 0)
4556 *duplicate = case_stack->data.case_stmt.default_label;
4557 return 2;
4559 case_stack->data.case_stmt.default_label = label;
4560 if (!dont_expand_label)
4561 expand_label (label);
4562 return 0;
4565 q = &case_stack->data.case_stmt.case_list;
4566 p = *q;
4568 while ((r = *q))
4570 p = r;
4572 /* Keep going past elements distinctly greater than HIGH. */
4573 if (tree_int_cst_lt (high, p->low))
4574 q = &p->left;
4576 /* or distinctly less than LOW. */
4577 else if (tree_int_cst_lt (p->high, low))
4578 q = &p->right;
4580 else
4582 /* We have an overlap; this is an error. */
4583 *duplicate = p->code_label;
4584 return 2;
4588 /* Add this label to the chain, and succeed. */
4590 r = ggc_alloc (sizeof (struct case_node));
4591 r->low = low;
4593 /* If the bounds are equal, turn this into the one-value case. */
4594 if (tree_int_cst_equal (low, high))
4595 r->high = r->low;
4596 else
4597 r->high = high;
4599 r->code_label = label;
4600 if (!dont_expand_label)
4601 expand_label (label);
4603 *q = r;
4604 r->parent = p;
4605 r->left = 0;
4606 r->right = 0;
4607 r->balance = 0;
4609 while (p)
4611 struct case_node *s;
4613 if (r == p->left)
4615 int b;
4617 if (! (b = p->balance))
4618 /* Growth propagation from left side. */
4619 p->balance = -1;
4620 else if (b < 0)
4622 if (r->balance < 0)
4624 /* R-Rotation */
4625 if ((p->left = s = r->right))
4626 s->parent = p;
4628 r->right = p;
4629 p->balance = 0;
4630 r->balance = 0;
4631 s = p->parent;
4632 p->parent = r;
4634 if ((r->parent = s))
4636 if (s->left == p)
4637 s->left = r;
4638 else
4639 s->right = r;
4641 else
4642 case_stack->data.case_stmt.case_list = r;
4644 else
4645 /* r->balance == +1 */
4647 /* LR-Rotation */
4649 int b2;
4650 struct case_node *t = r->right;
4652 if ((p->left = s = t->right))
4653 s->parent = p;
4655 t->right = p;
4656 if ((r->right = s = t->left))
4657 s->parent = r;
4659 t->left = r;
4660 b = t->balance;
4661 b2 = b < 0;
4662 p->balance = b2;
4663 b2 = -b2 - b;
4664 r->balance = b2;
4665 t->balance = 0;
4666 s = p->parent;
4667 p->parent = t;
4668 r->parent = t;
4670 if ((t->parent = s))
4672 if (s->left == p)
4673 s->left = t;
4674 else
4675 s->right = t;
4677 else
4678 case_stack->data.case_stmt.case_list = t;
4680 break;
4683 else
4685 /* p->balance == +1; growth of left side balances the node. */
4686 p->balance = 0;
4687 break;
4690 else
4691 /* r == p->right */
4693 int b;
4695 if (! (b = p->balance))
4696 /* Growth propagation from right side. */
4697 p->balance++;
4698 else if (b > 0)
4700 if (r->balance > 0)
4702 /* L-Rotation */
4704 if ((p->right = s = r->left))
4705 s->parent = p;
4707 r->left = p;
4708 p->balance = 0;
4709 r->balance = 0;
4710 s = p->parent;
4711 p->parent = r;
4712 if ((r->parent = s))
4714 if (s->left == p)
4715 s->left = r;
4716 else
4717 s->right = r;
4720 else
4721 case_stack->data.case_stmt.case_list = r;
4724 else
4725 /* r->balance == -1 */
4727 /* RL-Rotation */
4728 int b2;
4729 struct case_node *t = r->left;
4731 if ((p->right = s = t->left))
4732 s->parent = p;
4734 t->left = p;
4736 if ((r->left = s = t->right))
4737 s->parent = r;
4739 t->right = r;
4740 b = t->balance;
4741 b2 = b < 0;
4742 r->balance = b2;
4743 b2 = -b2 - b;
4744 p->balance = b2;
4745 t->balance = 0;
4746 s = p->parent;
4747 p->parent = t;
4748 r->parent = t;
4750 if ((t->parent = s))
4752 if (s->left == p)
4753 s->left = t;
4754 else
4755 s->right = t;
4758 else
4759 case_stack->data.case_stmt.case_list = t;
4761 break;
4763 else
4765 /* p->balance == -1; growth of right side balances the node. */
4766 p->balance = 0;
4767 break;
4771 r = p;
4772 p = p->parent;
4775 return 0;
4778 /* Maximum number of case bit tests. */
4779 #define MAX_CASE_BIT_TESTS 3
4781 /* By default, enable case bit tests on targets with ashlsi3. */
4782 #ifndef CASE_USE_BIT_TESTS
4783 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
4784 != CODE_FOR_nothing)
4785 #endif
4788 /* A case_bit_test represents a set of case nodes that may be
4789 selected from using a bit-wise comparison. HI and LO hold
4790 the integer to be tested against, LABEL contains the label
4791 to jump to upon success and BITS counts the number of case
4792 nodes handled by this test, typically the number of bits
4793 set in HI:LO. */
4795 struct case_bit_test
4797 HOST_WIDE_INT hi;
4798 HOST_WIDE_INT lo;
4799 rtx label;
4800 int bits;
4803 /* Determine whether "1 << x" is relatively cheap in word_mode. */
4805 static bool
4806 lshift_cheap_p (void)
4808 static bool init = false;
4809 static bool cheap = true;
4811 if (!init)
4813 rtx reg = gen_rtx_REG (word_mode, 10000);
4814 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
4815 cheap = cost < COSTS_N_INSNS (3);
4816 init = true;
4819 return cheap;
4822 /* Comparison function for qsort to order bit tests by decreasing
4823 number of case nodes, i.e. the node with the most cases gets
4824 tested first. */
4826 static int
4827 case_bit_test_cmp (const void *p1, const void *p2)
4829 const struct case_bit_test *d1 = p1;
4830 const struct case_bit_test *d2 = p2;
4832 return d2->bits - d1->bits;
4835 /* Expand a switch statement by a short sequence of bit-wise
4836 comparisons. "switch(x)" is effectively converted into
4837 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
4838 integer constants.
4840 INDEX_EXPR is the value being switched on, which is of
4841 type INDEX_TYPE. MINVAL is the lowest case value in
4842 the case nodes, of INDEX_TYPE type, and RANGE is the highest
4843 value minus MINVAL, also of type INDEX_TYPE. NODES is
4844 the set of case nodes, and DEFAULT_LABEL is the label to
4845 branch to should none of the cases match.
4847 There *MUST* be MAX_CASE_BIT_TESTS or fewer unique case
4848 node targets. */
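/* Illustrative example, not part of the original source: for

       switch (c) { case 'a': case 'e': case 'i': case 'o': case 'u': ... }

   MINVAL is 'a', RANGE is 'u' - 'a', and the code emitted below is
   roughly

       if ((unsigned) (c - 'a') > 'u' - 'a') goto default_label;
       if (((HOST_WIDE_INT) 1 << (c - 'a')) & CST) goto vowel_label;
       goto default_label;

   where CST has bits 0, 4, 8, 14 and 20 set.  */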
4850 static void
4851 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
4852 tree range, case_node_ptr nodes, rtx default_label)
4854 struct case_bit_test test[MAX_CASE_BIT_TESTS];
4855 enum machine_mode mode;
4856 rtx expr, index, label;
4857 unsigned int i,j,lo,hi;
4858 struct case_node *n;
4859 unsigned int count;
4861 count = 0;
4862 for (n = nodes; n; n = n->right)
4864 label = label_rtx (n->code_label);
4865 for (i = 0; i < count; i++)
4866 if (same_case_target_p (label, test[i].label))
4867 break;
4869 if (i == count)
4871 if (count >= MAX_CASE_BIT_TESTS)
4872 abort ();
4873 test[i].hi = 0;
4874 test[i].lo = 0;
4875 test[i].label = label;
4876 test[i].bits = 1;
4877 count++;
4879 else
4880 test[i].bits++;
4882 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4883 n->low, minval)), 1);
4884 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4885 n->high, minval)), 1);
4886 for (j = lo; j <= hi; j++)
4887 if (j >= HOST_BITS_PER_WIDE_INT)
4888 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
4889 else
4890 test[i].lo |= (HOST_WIDE_INT) 1 << j;
4893 qsort (test, count, sizeof(*test), case_bit_test_cmp);
4895 index_expr = fold (build (MINUS_EXPR, index_type,
4896 convert (index_type, index_expr),
4897 convert (index_type, minval)));
4898 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4899 emit_queue ();
4900 index = protect_from_queue (index, 0);
4901 do_pending_stack_adjust ();
4903 mode = TYPE_MODE (index_type);
4904 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
4905 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
4906 default_label);
4908 index = convert_to_mode (word_mode, index, 0);
4909 index = expand_binop (word_mode, ashl_optab, const1_rtx,
4910 index, NULL_RTX, 1, OPTAB_WIDEN);
4912 for (i = 0; i < count; i++)
4914 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
4915 expr = expand_binop (word_mode, and_optab, index, expr,
4916 NULL_RTX, 1, OPTAB_WIDEN);
4917 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
4918 word_mode, 1, test[i].label);
4921 emit_jump (default_label);
4924 #ifndef HAVE_casesi
4925 #define HAVE_casesi 0
4926 #endif
4928 #ifndef HAVE_tablejump
4929 #define HAVE_tablejump 0
4930 #endif
4932 /* Terminate a case (Pascal) or switch (C) statement
4933 in which ORIG_INDEX is the expression to be tested.
4934 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
4935 type as given in the source before any compiler conversions.
4936 Generate the code to test it and jump to the right place. */
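/* Illustrative overview, not part of the original source: the code below
   chooses one of three strategies: a short run of bit tests via
   emit_case_bit_tests when there are few distinct targets and the span
   fits in a word; a balanced tree of compare-and-branch insns when the
   values are sparse, the index is constant, or no dispatch insn exists;
   otherwise a jump table via try_casesi or try_tablejump.  */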
4938 void
4939 expand_end_case_type (tree orig_index, tree orig_type)
4941 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
4942 rtx default_label = 0;
4943 struct case_node *n, *m;
4944 unsigned int count, uniq;
4945 rtx index;
4946 rtx table_label;
4947 int ncases;
4948 rtx *labelvec;
4949 int i;
4950 rtx before_case, end, lab;
4951 struct nesting *thiscase = case_stack;
4952 tree index_expr, index_type;
4953 bool exit_done = false;
4954 int unsignedp;
4956 /* Don't crash due to previous errors. */
4957 if (thiscase == NULL)
4958 return;
4960 index_expr = thiscase->data.case_stmt.index_expr;
4961 index_type = TREE_TYPE (index_expr);
4962 unsignedp = TYPE_UNSIGNED (index_type);
4963 if (orig_type == NULL)
4964 orig_type = TREE_TYPE (orig_index);
4966 do_pending_stack_adjust ();
4968 /* This might get a spurious warning in the presence of a syntax error;
4969 it could be fixed by moving the call to check_seenlabel after the
4970 check for error_mark_node, and copying the code of check_seenlabel that
4971 deals with case_stack->data.case_stmt.line_number_status /
4972 restore_line_number_status in front of the call to end_cleanup_deferral.
4973 However, this might miss some useful warnings in the presence of
4974 non-syntax errors. */
4975 check_seenlabel ();
4977 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4978 if (index_type != error_mark_node)
4980 /* If we don't have a default-label, create one here,
4981 after the body of the switch. */
4982 if (thiscase->data.case_stmt.default_label == 0)
4984 thiscase->data.case_stmt.default_label
4985 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4986 /* Share the exit label if possible. */
4987 if (thiscase->exit_label)
4989 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
4990 thiscase->exit_label);
4991 exit_done = true;
4993 expand_label (thiscase->data.case_stmt.default_label);
4995 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4997 before_case = get_last_insn ();
4999 if (thiscase->data.case_stmt.case_list
5000 && thiscase->data.case_stmt.case_list->left)
5001 thiscase->data.case_stmt.case_list
5002 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5004 /* Simplify the case-list before we count it. */
5005 group_case_nodes (thiscase->data.case_stmt.case_list);
5006 strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
5007 default_label);
5009 /* Get upper and lower bounds of case values.
5010 Also convert all the case values to the index expr's data type. */
5012 uniq = 0;
5013 count = 0;
5014 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5016 /* Check low and high label values are integers. */
5017 if (TREE_CODE (n->low) != INTEGER_CST)
5018 abort ();
5019 if (TREE_CODE (n->high) != INTEGER_CST)
5020 abort ();
5022 n->low = convert (index_type, n->low);
5023 n->high = convert (index_type, n->high);
5025 /* Count the elements and track the largest and smallest
5026 of them (treating them as signed even if they are not). */
5027 if (count++ == 0)
5029 minval = n->low;
5030 maxval = n->high;
5032 else
5034 if (INT_CST_LT (n->low, minval))
5035 minval = n->low;
5036 if (INT_CST_LT (maxval, n->high))
5037 maxval = n->high;
5039 /* A range counts double, since it requires two compares. */
5040 if (! tree_int_cst_equal (n->low, n->high))
5041 count++;
5043 /* Count the number of unique case node targets. */
5044 uniq++;
5045 lab = label_rtx (n->code_label);
5046 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
5047 if (same_case_target_p (label_rtx (m->code_label), lab))
5049 uniq--;
5050 break;
5054 /* Compute span of values. */
5055 if (count != 0)
5056 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5058 end_cleanup_deferral ();
5060 if (count == 0)
5062 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5063 emit_queue ();
5064 emit_jump (default_label);
5067 /* Try implementing this switch statement by a short sequence of
5068 bit-wise comparisons. However, we let the binary-tree case
5069 below handle constant index expressions. */
5070 else if (CASE_USE_BIT_TESTS
5071 && ! TREE_CONSTANT (index_expr)
5072 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
5073 && compare_tree_int (range, 0) > 0
5074 && lshift_cheap_p ()
5075 && ((uniq == 1 && count >= 3)
5076 || (uniq == 2 && count >= 5)
5077 || (uniq == 3 && count >= 6)))
5079 /* Optimize the case where all the case values fit in a
5080 word without having to subtract MINVAL. In this case,
5081 we can optimize away the subtraction. */
5082 if (compare_tree_int (minval, 0) > 0
5083 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
5085 minval = integer_zero_node;
5086 range = maxval;
5088 emit_case_bit_tests (index_type, index_expr, minval, range,
5089 thiscase->data.case_stmt.case_list,
5090 default_label);
5093 /* If range of values is much bigger than number of values,
5094 make a sequence of conditional branches instead of a dispatch.
5095 If the switch-index is a constant, do it this way
5096 because we can optimize it. */
5098 else if (count < case_values_threshold ()
5099 || compare_tree_int (range,
5100 (optimize_size ? 3 : 10) * count) > 0
5101 /* RANGE may be signed, and really large ranges will show up
5102 as negative numbers. */
5103 || compare_tree_int (range, 0) < 0
5104 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5105 || flag_pic
5106 #endif
5107 || TREE_CONSTANT (index_expr)
5108 /* If neither casesi nor tablejump is available, we can
5109 only go this way. */
5110 || (!HAVE_casesi && !HAVE_tablejump))
5112 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5114 /* If the index is a short or char for which we do not have
5115 an insn to handle comparisons directly, convert it to
5116 a full integer now, rather than letting each comparison
5117 generate the conversion. */
5119 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5120 && ! have_insn_for (COMPARE, GET_MODE (index)))
5122 enum machine_mode wider_mode;
5123 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5124 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5125 if (have_insn_for (COMPARE, wider_mode))
5127 index = convert_to_mode (wider_mode, index, unsignedp);
5128 break;
5132 emit_queue ();
5133 do_pending_stack_adjust ();
5135 index = protect_from_queue (index, 0);
5136 if (GET_CODE (index) == MEM)
5137 index = copy_to_reg (index);
5138 if (GET_CODE (index) == CONST_INT
5139 || TREE_CODE (index_expr) == INTEGER_CST)
5141 /* Make a tree node with the proper constant value
5142 if we don't already have one. */
5143 if (TREE_CODE (index_expr) != INTEGER_CST)
5145 index_expr
5146 = build_int_2 (INTVAL (index),
5147 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5148 index_expr = convert (index_type, index_expr);
5151 /* For constant index expressions we need only
5152 issue an unconditional branch to the appropriate
5153 target code. The job of removing any unreachable
5154 code is left to the optimization phase if the
5155 "-O" option is specified. */
5156 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5157 if (! tree_int_cst_lt (index_expr, n->low)
5158 && ! tree_int_cst_lt (n->high, index_expr))
5159 break;
5161 if (n)
5162 emit_jump (label_rtx (n->code_label));
5163 else
5164 emit_jump (default_label);
5166 else
5168 /* If the index expression is not constant we generate
5169 a binary decision tree to select the appropriate
5170 target code. This is done as follows:
5172 The list of cases is rearranged into a binary tree,
5173 nearly optimal assuming equal probability for each case.
5175 The tree is transformed into RTL, eliminating
5176 redundant test conditions at the same time.
5178 If program flow could reach the end of the
5179 decision tree an unconditional jump to the
5180 default code is emitted. */
5182 use_cost_table
5183 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
5184 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5185 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5186 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5187 default_label, index_type);
5188 emit_jump_if_reachable (default_label);
5191 else
5193 table_label = gen_label_rtx ();
5194 if (! try_casesi (index_type, index_expr, minval, range,
5195 table_label, default_label))
5197 index_type = thiscase->data.case_stmt.nominal_type;
5199 /* Index jumptables from zero for suitable values of
5200 minval to avoid a subtraction. */
5201 if (! optimize_size
5202 && compare_tree_int (minval, 0) > 0
5203 && compare_tree_int (minval, 3) < 0)
5205 minval = integer_zero_node;
5206 range = maxval;
5209 if (! try_tablejump (index_type, index_expr, minval, range,
5210 table_label, default_label))
5211 abort ();
5214 /* Get table of labels to jump to, in order of case index. */
5216 ncases = tree_low_cst (range, 0) + 1;
5217 labelvec = alloca (ncases * sizeof (rtx));
5218 memset (labelvec, 0, ncases * sizeof (rtx));
5220 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5222 /* Compute the low and high bounds relative to the minimum
5223 value since that should fit in a HOST_WIDE_INT while the
5224 actual values may not. */
5225 HOST_WIDE_INT i_low
5226 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5227 n->low, minval)), 1);
5228 HOST_WIDE_INT i_high
5229 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5230 n->high, minval)), 1);
5231 HOST_WIDE_INT i;
5233 for (i = i_low; i <= i_high; i ++)
5234 labelvec[i]
5235 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5238 /* Fill in the gaps with the default. */
5239 for (i = 0; i < ncases; i++)
5240 if (labelvec[i] == 0)
5241 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5243 /* Output the table. */
5244 emit_label (table_label);
5246 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5247 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5248 gen_rtx_LABEL_REF (Pmode, table_label),
5249 gen_rtvec_v (ncases, labelvec),
5250 const0_rtx, const0_rtx));
5251 else
5252 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5253 gen_rtvec_v (ncases, labelvec)));
5255 /* If the case insn drops through the table,
5256 after the table we must jump to the default-label.
5257 Otherwise record no drop-through after the table. */
5258 #ifdef CASE_DROPS_THROUGH
5259 emit_jump (default_label);
5260 #else
5261 emit_barrier ();
5262 #endif
5265 before_case = NEXT_INSN (before_case);
5266 end = get_last_insn ();
5267 if (squeeze_notes (&before_case, &end))
5268 abort ();
5269 reorder_insns (before_case, end,
5270 thiscase->data.case_stmt.start);
5272 else
5273 end_cleanup_deferral ();
5275 if (thiscase->exit_label && !exit_done)
5276 emit_label (thiscase->exit_label);
5278 POPSTACK (case_stack);
5280 free_temp_slots ();
5283 /* Convert the tree NODE into a list linked by the right field, with the left
5284 field zeroed. RIGHT is used for recursion; it is a list to be placed
5285 rightmost in the resulting list. */
5287 static struct case_node *
5288 case_tree2list (struct case_node *node, struct case_node *right)
5290 struct case_node *left;
5292 if (node->right)
5293 right = case_tree2list (node->right, right);
5295 node->right = right;
5296 if ((left = node->left))
5298 node->left = 0;
5299 return case_tree2list (left, node);
5302 return node;
5305 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5307 static void
5308 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
5310 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5312 if (op1 == op2)
5313 emit_jump (label);
5315 else
5316 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5317 (GET_MODE (op1) == VOIDmode
5318 ? GET_MODE (op2) : GET_MODE (op1)),
5319 unsignedp, label);
5322 /* Not all case values are encountered equally. This function
5323 uses a heuristic to weight case labels, in cases where that
5324 looks like a reasonable thing to do.
5326 Right now, all we try to guess is text, and we establish the
5327 following weights:
5329 chars above space: 16
5330 digits: 16
5331 default: 12
5332 space, punct: 8
5333 tab: 4
5334 newline: 2
5335 other "\" chars: 1
5336 remaining chars: 0
5338 If we find any cases in the switch that are not either -1 or in the range
5339 of valid ASCII characters, or are control characters other than those
5340 commonly used with "\", don't treat this switch as scanning text.
5342 Return 1 if these nodes are suitable for cost estimation, otherwise
5343 return 0. */
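/* Illustrative note, not part of the original source: if every case
   label looks like text, e.g. a switch over 'a'..'z' plus '\n', the
   letters each get weight 16 and the newline weight 2, so
   balance_case_nodes splits the chain nearer the middle of the letters
   than a plain node count would.  */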
5345 static int
5346 estimate_case_costs (case_node_ptr node)
5348 tree min_ascii = integer_minus_one_node;
5349 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5350 case_node_ptr n;
5351 int i;
5353 /* If we haven't already made the cost table, make it now. Note that the
5354 lower bound of the table is -1, not zero. */
5356 if (! cost_table_initialized)
5358 cost_table_initialized = 1;
5360 for (i = 0; i < 128; i++)
5362 if (ISALNUM (i))
5363 COST_TABLE (i) = 16;
5364 else if (ISPUNCT (i))
5365 COST_TABLE (i) = 8;
5366 else if (ISCNTRL (i))
5367 COST_TABLE (i) = -1;
5370 COST_TABLE (' ') = 8;
5371 COST_TABLE ('\t') = 4;
5372 COST_TABLE ('\0') = 4;
5373 COST_TABLE ('\n') = 2;
5374 COST_TABLE ('\f') = 1;
5375 COST_TABLE ('\v') = 1;
5376 COST_TABLE ('\b') = 1;
5379 /* See if all the case expressions look like text. It is text if the
5380 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5381 as signed arithmetic since we don't want to ever access cost_table with a
5382 value less than -1. Also check that none of the constants in a range
5383 are strange control characters. */
5385 for (n = node; n; n = n->right)
5387 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5388 return 0;
5390 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5391 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5392 if (COST_TABLE (i) < 0)
5393 return 0;
5396 /* All interesting values are within the range of interesting
5397 ASCII characters. */
5398 return 1;
5401 /* Determine whether two case labels branch to the same target. */
5403 static bool
5404 same_case_target_p (rtx l1, rtx l2)
5406 #if 0
5407 rtx i1, i2;
5409 if (l1 == l2)
5410 return true;
5412 i1 = next_real_insn (l1);
5413 i2 = next_real_insn (l2);
5414 if (i1 == i2)
5415 return true;
5417 if (i1 && simplejump_p (i1))
5419 l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
5422 if (i2 && simplejump_p (i2))
5424 l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
5426 #endif
5427 /* When coming from gimple, we usually won't have emitted either
5428 the labels or the body of the switch statement. The job being
5429 done here should be done via jump threading at the tree level.
5430 Cases that go to the same place should have the same label. */
5431 return l1 == l2;
5434 /* Delete nodes that branch to the default label from a list of
5435 case nodes. Eg. case 5: default: becomes just default: */
5437 static void
5438 strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
5440 case_node_ptr ptr;
5442 while (*prev)
5444 ptr = *prev;
5445 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
5446 *prev = ptr->right;
5447 else
5448 prev = &ptr->right;
5452 /* Scan an ordered list of case nodes
5453 combining those with consecutive values or ranges.
5455 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5457 static void
5458 group_case_nodes (case_node_ptr head)
5460 case_node_ptr node = head;
5462 while (node)
5464 rtx lab;
5465 case_node_ptr np = node;
5467 lab = label_rtx (node->code_label);
5469 /* Try to group the successors of NODE with NODE. */
5470 while (((np = np->right) != 0)
5471 /* Do they jump to the same place? */
5472 && same_case_target_p (label_rtx (np->code_label), lab)
5473 /* Are their ranges consecutive? */
5474 && tree_int_cst_equal (np->low,
5475 fold (build (PLUS_EXPR,
5476 TREE_TYPE (node->high),
5477 node->high,
5478 integer_one_node)))
5479 /* An overflow is not consecutive. */
5480 && tree_int_cst_lt (node->high,
5481 fold (build (PLUS_EXPR,
5482 TREE_TYPE (node->high),
5483 node->high,
5484 integer_one_node))))
5486 node->high = np->high;
5488 /* NP is the first node after NODE which can't be grouped with it.
5489 Delete the nodes in between, and move on to that node. */
5490 node->right = np;
5491 node = np;
5495 /* Take an ordered list of case nodes
5496 and transform them into a near optimal binary tree,
5497 on the assumption that any target code selection value is as
5498 likely as any other.
5500 The transformation is performed by splitting the ordered
5501 list into two equal sections plus a pivot. The parts are
5502 then attached to the pivot as left and right branches. Each
5503 branch is then transformed recursively. */
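/* Illustrative example, not part of the original source: an ordered
   chain of seven single-value cases 1,2,...,7 with no cost table is
   split at 4, which becomes the subtree root; 1-3 and 5-7 are then
   balanced recursively, giving a tree of depth three.  */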
5505 static void
5506 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
5508 case_node_ptr np;
5510 np = *head;
5511 if (np)
5513 int cost = 0;
5514 int i = 0;
5515 int ranges = 0;
5516 case_node_ptr *npp;
5517 case_node_ptr left;
5519 /* Count the number of entries on branch. Also count the ranges. */
5521 while (np)
5523 if (!tree_int_cst_equal (np->low, np->high))
5525 ranges++;
5526 if (use_cost_table)
5527 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5530 if (use_cost_table)
5531 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5533 i++;
5534 np = np->right;
5537 if (i > 2)
5539 /* Split this list if it is long enough for that to help. */
5540 npp = head;
5541 left = *npp;
5542 if (use_cost_table)
5544 /* Find the place in the list that bisects the list's total cost.
5545 Here I gets half the total cost. */
5546 int n_moved = 0;
5547 i = (cost + 1) / 2;
5548 while (1)
5550 /* Skip nodes while their cost does not reach that amount. */
5551 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5552 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5553 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5554 if (i <= 0)
5555 break;
5556 npp = &(*npp)->right;
5557 n_moved += 1;
5559 if (n_moved == 0)
5561 /* Leave this branch lopsided, but optimize left-hand
5562 side and fill in `parent' fields for right-hand side. */
5563 np = *head;
5564 np->parent = parent;
5565 balance_case_nodes (&np->left, np);
5566 for (; np->right; np = np->right)
5567 np->right->parent = np;
5568 return;
5571 /* If there are just three nodes, split at the middle one. */
5572 else if (i == 3)
5573 npp = &(*npp)->right;
5574 else
5576 /* Find the place in the list that bisects the list's total cost,
5577 where ranges count as 2.
5578 Here I gets half the total cost. */
5579 i = (i + ranges + 1) / 2;
5580 while (1)
5582 /* Skip nodes while their cost does not reach that amount. */
5583 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5584 i--;
5585 i--;
5586 if (i <= 0)
5587 break;
5588 npp = &(*npp)->right;
5591 *head = np = *npp;
5592 *npp = 0;
5593 np->parent = parent;
5594 np->left = left;
5596 /* Optimize each of the two split parts. */
5597 balance_case_nodes (&np->left, np);
5598 balance_case_nodes (&np->right, np);
5600 else
5602 /* Else leave this branch as one level,
5603 but fill in `parent' fields. */
5604 np = *head;
5605 np->parent = parent;
5606 for (; np->right; np = np->right)
5607 np->right->parent = np;

/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed, so it is
   known that if a parent node has already tested against the
   current node's lower bound minus one, the current node is
   bounded at its lower end.  Thus the test would be redundant.  */

static int
node_has_low_bound (case_node_ptr node, tree index_type)
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}
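
/* For illustration only: if NODE->low is the smallest value of its own
   type (which need not be INDEX_TYPE), the MINUS_EXPR above wraps
   around, leaving LOW_MINUS_ONE not less than NODE->low.  The
   tree_int_cst_lt test detects exactly this case, and the function
   then conservatively reports no known lower bound.  */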

/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed, so it is
   known that if a parent node has already tested against the
   current node's upper bound plus one, the current node is
   bounded at its upper end.  Thus the test would be redundant.  */

static int
node_has_high_bound (case_node_ptr node, tree index_type)
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If there is no upper bound, obviously no test is needed.  */

  if (TYPE_MAX_VALUE (index_type) == NULL)
    return 1;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
			       node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}

/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (case_node_ptr node, tree index_type)
{
  return (node_has_low_bound (node, index_type)
	  && node_has_high_bound (node, index_type));
}
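
/* For illustration only: given case values 49, 50 and 51 balanced with
   50 at the root, the node testing 51 is reached only after the root
   has tested against 50 (= 51 - 1), so node_has_low_bound returns 1
   and no "index < 51" check is emitted.  If the upper bound is
   likewise covered, node_is_bounded lets the caller emit a single
   unconditional jump.  */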

/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (rtx label)
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}

/* Emit step-by-step code to select a case for the value of INDEX.
   The decision tree thus generated follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In that case the conditional and/or
   unconditional jumps resulting from the boundary check for the
   current node are arranged to target the subordinate's associated
   code directly for conditions out of the current node's bounds.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */

static void
emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
		 tree index_type)
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TYPE_UNSIGNED (index_type);
  enum machine_mode mode = GET_MODE (index);
  enum machine_mode imode = TYPE_MODE (index_type);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (index,
			convert_modes (mode, imode,
				       expand_expr (node->low, NULL_RTX,
						    VOIDmode, 0),
				       unsignedp),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->right->code_label));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->left->code_label));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  /* If both children are single-valued cases with no
	     children, finish up all the work.  This way, we can save
	     one ordered comparison.  */
	  else if (tree_int_cst_equal (node->right->low, node->right->high)
		   && node->right->left == 0
		   && node->right->right == 0
		   && tree_int_cst_equal (node->left->low, node->left->high)
		   && node->left->left == 0
		   && node->left->right == 0)
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      /* See if the value matches what the right hand side
		 wants.  */
	      do_jump_if_equal (index,
				convert_modes (mode, imode,
					       expand_expr (node->right->low,
							    NULL_RTX,
							    VOIDmode, 0),
					       unsignedp),
				label_rtx (node->right->code_label),
				unsignedp);

	      /* See if the value matches what the left hand side
		 wants.  */
	      do_jump_if_equal (index,
				convert_modes (mode, imode,
					       expand_expr (node->left->low,
							    NULL_RTX,
							    VOIDmode, 0),
					       unsignedp),
				label_rtx (node->left->code_label),
				unsignedp);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      emit_jump_if_reachable (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left, so we issue a
	     conditional branch to default and process the right child.

	     Omit the conditional branch to default if it avoids only one
	     right child; it costs too much space to save so little time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_expr (node->high, NULL_RTX,
							 VOIDmode, 0),
					    unsignedp),
					   LT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (index,
			      convert_modes
			      (mode, imode,
			       expand_expr (node->right->low, NULL_RTX,
					    VOIDmode, 0),
			       unsignedp),
			      label_rtx (node->right->code_label), unsignedp);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_expr (node->high, NULL_RTX,
							 VOIDmode, 0),
					    unsignedp),
					   GT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers greater than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (index,
			      convert_modes
			      (mode, imode,
			       expand_expr (node->left->low, NULL_RTX,
					    VOIDmode, 0),
			       unsignedp),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_cmp_and_jump_insns (index,
				     convert_modes
				     (mode, imode,
				      expand_expr (node->high, NULL_RTX,
						   VOIDmode, 0),
				      unsignedp),
				     GT, NULL_RTX, mode, unsignedp,
				     label_rtx (node->right->code_label));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump_if_reachable (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->low, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
				    unsignedp),
				   LE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */
	  int high_bound = node_has_high_bound (node, index_type);
	  int low_bound = node_has_low_bound (node, index_type);

	  if (!high_bound && low_bound)
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  else if (!low_bound && high_bound)
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->low, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }
	  else if (!low_bound && !high_bound)
	    {
	      /* Widen LOW and HIGH to the same width as INDEX.  */
	      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	      tree low = build1 (CONVERT_EXPR, type, node->low);
	      tree high = build1 (CONVERT_EXPR, type, node->high);
	      rtx low_rtx, new_index, new_bound;

	      /* Instead of doing two branches, emit one unsigned branch for
		 (index-low) > (high-low).  */
	      low_rtx = expand_expr (low, NULL_RTX, mode, 0);
	      new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
					       NULL_RTX, unsignedp,
					       OPTAB_WIDEN);
	      new_bound = expand_expr (fold (build (MINUS_EXPR, type,
						    high, low)),
				       NULL_RTX, mode, 0);

	      emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
				       mode, 1, default_label);
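
	      /* Worked example (illustrative): for the range 3..7 and an
		 index of 2, (2 - 3) wraps to a huge unsigned value, which
		 exceeds 7 - 3 = 4, so the single unsigned comparison above
		 routes 2 to the default label, matching the effect of the
		 two tests "index < 3" and "index > 7".  */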
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}
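
/* A minimal standalone sketch, not compiled into GCC, of the shape of
   code the function above produces for single-valued nodes.  The
   struct, toy_emit, the printed pseudo code and the label numbering
   are invented for illustration; the real routine emits RTL through
   do_jump_if_equal and emit_cmp_and_jump_insns, handles ranges, and
   prunes tests that node_is_bounded proves redundant.  */
#if 0
#include <stdio.h>

struct toy_node
{
  int low, high;		/* Case value (or range).  */
  int label;			/* Number of the target label.  */
  struct toy_node *left, *right;
};

static int label_seq;

static void
toy_emit (struct toy_node *node)
{
  int around;

  if (node == 0)
    return;

  /* Test for this node's own value first, as the single-valued
     branch of emit_case_nodes does.  */
  printf ("  if (index == %d) goto L%d;\n", node->low, node->label);

  if (node->left && node->right)
    {
      /* One ordered comparison dispatches to the proper subtree;
	 a fall-through from the left side must not reach the right.  */
      around = ++label_seq;
      printf ("  if (index > %d) goto T%d;\n", node->low, around);
      toy_emit (node->left);
      printf ("  goto Ldefault;\n");
      printf ("T%d:\n", around);
      toy_emit (node->right);
    }
  else
    {
      /* With one subtree the real code also guards against values on
	 the childless side by branching to the default label.  */
      toy_emit (node->left);
      toy_emit (node->right);
    }
}

int
main (void)
{
  /* The balanced tree for case values 1, 2 and 3 with distinct
     targets, 2 at the root.  */
  struct toy_node n1 = { 1, 1, 1, 0, 0 };
  struct toy_node n3 = { 3, 3, 3, 0, 0 };
  struct toy_node n2 = { 2, 2, 2, &n1, &n3 };

  toy_emit (&n2);
  printf ("Ldefault:\n");
  return 0;
}
#endif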

#include "gt-stmt.h"