* system.h: Poison NO_RECURSIVE_FUNCTION_CSE.
[official-gcc.git] / gcc / stmt.c
blob330ae5f2b539709ab8dfce262d1799f76d4c56f7
1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "tm_p.h"
44 #include "flags.h"
45 #include "except.h"
46 #include "function.h"
47 #include "insn-config.h"
48 #include "expr.h"
49 #include "libfuncs.h"
50 #include "hard-reg-set.h"
51 #include "loop.h"
52 #include "recog.h"
53 #include "machmode.h"
54 #include "toplev.h"
55 #include "output.h"
56 #include "ggc.h"
57 #include "langhooks.h"
58 #include "predict.h"
59 #include "optabs.h"
60 #include "target.h"
61 #include "regs.h"
63 /* Functions and data structures for expanding case statements. */
65 /* Case label structure, used to hold info on labels within case
66 statements. We handle "range" labels; for a single-value label
67 as in C, the high and low limits are the same.
69 An AVL tree of case nodes is initially created, and later transformed
70 to a list linked via the RIGHT fields in the nodes. Nodes with
71 higher case values are later in the list.
73 Switch statements can be output in one of two forms. A branch table
74 is used if there are more than a few labels and the labels are dense
75 within the range between the smallest and largest case value. If a
76 branch table is used, no further manipulations are done with the case
77 node chain.
79 The alternative to the use of a branch table is to generate a series
80 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
81 and PARENT fields to hold a binary tree. Initially the tree is
82 totally unbalanced, with everything on the right. We balance the tree
83 with nodes on the left having lower case values than the parent
84 and nodes on the right having higher values. We then output the tree
85 in order. */
87 struct case_node GTY(())
89 struct case_node *left; /* Left son in binary tree */
90 struct case_node *right; /* Right son in binary tree; also node chain */
91 struct case_node *parent; /* Parent of node in binary tree */
92 tree low; /* Lowest index value for this label */
93 tree high; /* Highest index value for this label */
94 tree code_label; /* Label to jump to when node matches */
95 int balance;
98 typedef struct case_node case_node;
99 typedef struct case_node *case_node_ptr;
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* Cost of dispatching on a case value, indexed by value + 1 so that
   -1 can be accommodated.  Must be a signed type, and non-ANSI
   compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;		/* Nonzero to consult the table.  */
static int cost_table_initialized;	/* Nonzero once the table is filled.  */

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
112 /* Stack of control and binding constructs we are currently inside.
114 These constructs begin when you call `expand_start_WHATEVER'
115 and end when you call `expand_end_WHATEVER'. This stack records
116 info about how the construct began that tells the end-function
117 what to do. It also may provide information about the construct
118 to alter the behavior of other constructs within the body.
119 For example, they may affect the behavior of C `break' and `continue'.
121 Each construct gets one `struct nesting' object.
122 All of these objects are chained through the `all' field.
123 `nesting_stack' points to the first object (innermost construct).
124 The position of an entry on `nesting_stack' is in its `depth' field.
126 Each type of construct has its own individual stack.
127 For example, loops have `cond_stack'. Each object points to the
128 next object of the same type through the `next' field.
130 Some constructs are visible to `break' exit-statements and others
131 are not. Which constructs are visible depends on the language.
132 Therefore, the data structure allows each construct to be visible
133 or not, according to the args given when the construct is started.
134 The construct is visible if the `exit_label' field is non-null.
135 In that case, the value should be a CODE_LABEL rtx. */
/* One entry on the stack of pending control/binding constructs.
   Chained through ALL (every entry) and NEXT (entries of the same
   kind); DESC selects which member of DATA is live.  */

struct nesting GTY(())
{
  struct nesting *all;		/* Chain of every pending construct.  */
  struct nesting *next;		/* Chain of constructs of the same kind.  */
  int depth;			/* Position on nesting_stack.  */
  rtx exit_label;		/* Non-null CODE_LABEL if visible to `break'.  */
  enum nesting_desc {
    COND_NESTING,
    BLOCK_NESTING,
    CASE_NESTING
  } desc;
  union nesting_u
    {
      /* For conds (if-then and if-then-else statements).  */
      struct nesting_cond
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} GTY ((tag ("COND_NESTING"))) cond;
      /* For variable binding contours.  */
      struct nesting_block
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block, if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	} GTY ((tag ("BLOCK_NESTING"))) block;
      /* For switch (C) or case (Pascal) statements.  */
      struct nesting_case
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} GTY ((tag ("CASE_NESTING"))) case_stmt;
    } GTY ((desc ("%1.desc"))) data;
};
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.

   The macro-local variable is named T rather than `this' so the
   file remains compilable by a C++ compiler (`this' is a C++
   keyword).  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *t;					\
     do { t = nesting_stack;				\
	  if (cond_stack == t)				\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == t)				\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == t)			\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == t)				\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = t->all; }			\
     while (t != target); } while (0)
262 /* In some cases it is impossible to generate code for a forward goto
263 until the label definition is seen. This happens when it may be necessary
264 for the goto to reset the stack pointer: we don't yet know how to do that.
265 So expand_goto puts an entry on this fixup list.
266 Each time a binding contour that resets the stack is exited,
267 we check each fixup.
268 If the target label has now been defined, we can insert the proper code. */
270 struct goto_fixup GTY(())
272 /* Points to following fixup. */
273 struct goto_fixup *next;
274 /* Points to the insn before the jump insn.
275 If more code must be inserted, it goes after this insn. */
276 rtx before_jump;
277 /* The LABEL_DECL that this jump is jumping to, or 0
278 for break, continue or return. */
279 tree target;
280 /* The BLOCK for the place where this goto was found. */
281 tree context;
282 /* The CODE_LABEL rtx that this is jumping to. */
283 rtx target_rtl;
284 /* Number of binding contours started in current function
285 before the label reference. */
286 int block_start_count;
287 /* The outermost stack level that should be restored for this jump.
288 Each time a binding contour that resets the stack is exited,
289 if the target label is *not* yet defined, this slot is updated. */
290 rtx stack_level;
291 /* List of lists of cleanup expressions to be run by this goto.
292 There is one element for each block that this goto is within.
293 The tail of this list can be 0,
294 if all remaining elements would be empty.
295 The TREE_VALUE contains the cleanup list of that block as of the
296 time this goto was seen.
297 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
298 tree cleanup_list_list;
301 /* Within any binding contour that must restore a stack level,
302 all labels are recorded with a chain of these structures. */
304 struct label_chain GTY(())
306 /* Points to following fixup. */
307 struct label_chain *next;
308 tree label;
311 struct stmt_status GTY(())
313 /* Chain of all pending binding contours. */
314 struct nesting * x_block_stack;
316 /* If any new stacks are added here, add them to POPSTACKS too. */
318 /* Chain of all pending binding contours that restore stack levels
319 or have cleanups. */
320 struct nesting * x_stack_block_stack;
322 /* Chain of all pending conditional statements. */
323 struct nesting * x_cond_stack;
325 /* Chain of all pending case or switch statements. */
326 struct nesting * x_case_stack;
328 /* Separate chain including all of the above,
329 chained through the `all' field. */
330 struct nesting * x_nesting_stack;
332 /* Number of entries on nesting_stack now. */
333 int x_nesting_depth;
335 /* Number of binding contours started so far in this function. */
336 int x_block_start_count;
338 /* Each time we expand an expression-statement,
339 record the expr's type and its RTL value here. */
340 tree x_last_expr_type;
341 rtx x_last_expr_value;
342 rtx x_last_expr_alt_rtl;
344 /* Nonzero if within a ({...}) grouping, in which case we must
345 always compute a value for each expr-stmt in case it is the last one. */
346 int x_expr_stmts_for_value;
348 /* Location of last line-number note, whether we actually
349 emitted it or not. */
350 location_t x_emit_locus;
352 struct goto_fixup *x_goto_fixup_chain;
/* Convenience accessors for the current function's stmt_status
   fields.  All of these evaluate CFUN->STMT, so they are only valid
   while a function is being expanded.  */

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define last_expr_alt_rtl (cfun->stmt->x_last_expr_alt_rtl)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_locus (cfun->stmt->x_emit_locus)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
369 /* Nonzero if we are using EH to handle cleanups. */
370 int using_eh_for_cleanups_p = 0;
372 static int n_occurrences (int, const char *);
373 static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
374 static void expand_goto_internal (tree, rtx, rtx);
375 static int expand_fixup (tree, rtx, rtx);
376 static void expand_nl_goto_receiver (void);
377 static void fixup_gotos (struct nesting *, rtx, tree, rtx, int);
378 static bool check_operand_nalternatives (tree, tree);
379 static bool check_unique_operand_names (tree, tree);
380 static char *resolve_operand_name_1 (char *, tree, tree);
381 static void expand_null_return_1 (rtx);
382 static enum br_predictor return_prediction (rtx);
383 static rtx shift_return_value (rtx);
384 static void expand_value_return (rtx);
385 static int tail_recursion_args (tree, tree);
386 static void expand_cleanups (tree, int, int);
387 static void check_seenlabel (void);
388 static void do_jump_if_equal (rtx, rtx, rtx, int);
389 static int estimate_case_costs (case_node_ptr);
390 static bool same_case_target_p (rtx, rtx);
391 static void strip_default_case_nodes (case_node_ptr *, rtx);
392 static bool lshift_cheap_p (void);
393 static int case_bit_test_cmp (const void *, const void *);
394 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
395 static void group_case_nodes (case_node_ptr);
396 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
397 static int node_has_low_bound (case_node_ptr, tree);
398 static int node_has_high_bound (case_node_ptr, tree);
399 static int node_is_bounded (case_node_ptr, tree);
400 static void emit_jump_if_reachable (rtx);
401 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
402 static struct case_node *case_tree2list (case_node *, case_node *);
404 void
405 using_eh_for_cleanups (void)
407 using_eh_for_cleanups_p = 1;
410 void
411 init_stmt_for_function (void)
413 cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
416 /* Record the current file and line. Called from emit_line_note. */
418 void
419 set_file_and_line_for_stmt (location_t location)
421 /* If we're outputting an inline function, and we add a line note,
422 there may be no CFUN->STMT information. So, there's no need to
423 update it. */
424 if (cfun->stmt)
425 emit_locus = location;
428 /* Emit a no-op instruction. */
430 void
431 emit_nop (void)
433 rtx last_insn;
435 last_insn = get_last_insn ();
436 if (!optimize
437 && (GET_CODE (last_insn) == CODE_LABEL
438 || (GET_CODE (last_insn) == NOTE
439 && prev_real_insn (last_insn) == 0)))
440 emit_insn (gen_nop ());
443 /* Return the rtx-label that corresponds to a LABEL_DECL,
444 creating it if necessary. */
447 label_rtx (tree label)
449 if (TREE_CODE (label) != LABEL_DECL)
450 abort ();
452 if (!DECL_RTL_SET_P (label))
454 rtx r = gen_label_rtx ();
455 SET_DECL_RTL (label, r);
456 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
457 LABEL_PRESERVE_P (r) = 1;
460 return DECL_RTL (label);
463 /* As above, but also put it on the forced-reference list of the
464 function that contains it. */
466 force_label_rtx (tree label)
468 rtx ref = label_rtx (label);
469 tree function = decl_function_context (label);
470 struct function *p;
472 if (!function)
473 abort ();
475 if (function != current_function_decl)
476 p = find_function_data (function);
477 else
478 p = cfun;
480 p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
481 p->expr->x_forced_labels);
482 return ref;
485 /* Add an unconditional jump to LABEL as the next sequential instruction. */
487 void
488 emit_jump (rtx label)
490 do_pending_stack_adjust ();
491 emit_jump_insn (gen_jump (label));
492 emit_barrier ();
495 /* Emit code to jump to the address
496 specified by the pointer expression EXP. */
498 void
499 expand_computed_goto (tree exp)
501 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
503 x = convert_memory_address (Pmode, x);
505 emit_queue ();
507 if (! cfun->computed_goto_common_label)
509 cfun->computed_goto_common_reg = copy_to_mode_reg (Pmode, x);
510 cfun->computed_goto_common_label = gen_label_rtx ();
512 do_pending_stack_adjust ();
513 emit_label (cfun->computed_goto_common_label);
514 emit_indirect_jump (cfun->computed_goto_common_reg);
516 current_function_has_computed_jump = 1;
518 else
520 emit_move_insn (cfun->computed_goto_common_reg, x);
521 emit_jump (cfun->computed_goto_common_label);
525 /* Handle goto statements and the labels that they can go to. */
527 /* Specify the location in the RTL code of a label LABEL,
528 which is a LABEL_DECL tree node.
530 This is used for the kind of label that the user can jump to with a
531 goto statement, and for alternatives of a switch or case statement.
532 RTL labels generated for loops and conditionals don't go through here;
533 they are generated directly at the RTL level, by other functions below.
535 Note that this has nothing to do with defining label *names*.
536 Languages vary in how they do that and what that even means. */
538 void
539 expand_label (tree label)
541 struct label_chain *p;
542 rtx label_r = label_rtx (label);
544 do_pending_stack_adjust ();
545 emit_label (label_r);
546 if (DECL_NAME (label))
547 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
549 if (DECL_NONLOCAL (label))
551 expand_nl_goto_receiver ();
552 nonlocal_goto_handler_labels
553 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
554 nonlocal_goto_handler_labels);
557 if (FORCED_LABEL (label))
558 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
560 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
561 maybe_set_first_label_num (label_r);
563 if (stack_block_stack != 0)
565 p = ggc_alloc (sizeof (struct label_chain));
566 p->next = stack_block_stack->data.block.label_chain;
567 stack_block_stack->data.block.label_chain = p;
568 p->label = label;
572 /* Generate RTL code for a `goto' statement with target label LABEL.
573 LABEL should be a LABEL_DECL tree node that was or will later be
574 defined with `expand_label'. */
576 void
577 expand_goto (tree label)
579 #ifdef ENABLE_CHECKING
580 /* Check for a nonlocal goto to a containing function. Should have
581 gotten translated to __builtin_nonlocal_goto. */
582 tree context = decl_function_context (label);
583 if (context != 0 && context != current_function_decl)
584 abort ();
585 #endif
587 expand_goto_internal (label, label_rtx (label), NULL_RTX);
590 /* Generate RTL code for a `goto' statement with target label BODY.
591 LABEL should be a LABEL_REF.
592 LAST_INSN, if non-0, is the rtx we should consider as the last
593 insn emitted (for the purposes of cleaning up a return). */
/* NOTE(review): this listing was scraped from a web viewer -- blank and
   brace-only lines were dropped and the original file's line numbers
   are fused onto each line.  The control structure below follows the
   surviving comments; do not re-indent without the pristine source.  */
595 static void
596 expand_goto_internal (tree body, rtx label, rtx last_insn)
598 struct nesting *block;
599 rtx stack_level = 0;
/* LABEL must already be a hard CODE_LABEL here; expand_goto passes
   label_rtx (label).  */
601 if (GET_CODE (label) != CODE_LABEL)
602 abort ();
604 /* If label has already been defined, we can tell now
605 whether and how we must alter the stack level. */
607 if (PREV_INSN (label) != 0)
609 /* Find the innermost pending block that contains the label.
610 (Check containment by comparing insn-uids.)
611 Then restore the outermost stack level within that block,
612 and do cleanups of all blocks contained in it. */
613 for (block = block_stack; block; block = block->next)
615 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
616 break;
617 if (block->data.block.stack_level != 0)
618 stack_level = block->data.block.stack_level;
619 /* Execute the cleanups for blocks we are exiting. */
620 if (block->data.block.cleanups != 0)
622 expand_cleanups (block->data.block.cleanups, 1, 1);
623 do_pending_stack_adjust ();
/* A nonzero STACK_LEVEL means this goto exits a contour that saved
   the stack pointer; restore it explicitly rather than letting
   emit_jump's pending adjustment clobber it.  */
627 if (stack_level)
629 /* Ensure stack adjust isn't done by emit_jump, as this
630 would clobber the stack pointer. This one should be
631 deleted as dead by flow. */
632 clear_pending_stack_adjust ();
633 do_pending_stack_adjust ();
635 /* Don't do this adjust if it's to the end label and this function
636 is to return with a depressed stack pointer. */
637 if (label == return_label
638 && (((TREE_CODE (TREE_TYPE (current_function_decl))
639 == FUNCTION_TYPE)
640 && (TYPE_RETURNS_STACK_DEPRESSED
641 (TREE_TYPE (current_function_decl))))))
643 else
644 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
/* NOTE(review): DECL_TOO_LATE presumably marks labels that can no
   longer be jumped to legally -- the error text below says the jump
   enters a binding contour invalidly; confirm against tree.h.  */
647 if (body != 0 && DECL_TOO_LATE (body))
648 error ("jump to `%s' invalidly jumps into binding contour",
649 IDENTIFIER_POINTER (DECL_NAME (body)));
651 /* Label not yet defined: may need to put this goto
652 on the fixup list. */
653 else if (! expand_fixup (body, label, last_insn))
655 /* No fixup needed. Record that the label is the target
656 of at least one goto that has no fixup. */
657 if (body != 0)
658 TREE_ADDRESSABLE (body) = 1;
/* In every case the actual jump is emitted last.  */
661 emit_jump (label);
664 /* Generate if necessary a fixup for a goto
665 whose target label in tree structure (if any) is TREE_LABEL
666 and whose target in rtl is RTL_LABEL.
668 If LAST_INSN is nonzero, we pretend that the jump appears
669 after insn LAST_INSN instead of at the current point in the insn stream.
671 The fixup will be used later to insert insns just before the goto.
672 Those insns will restore the stack level as appropriate for the
673 target label, and will (in the case of C++) also invoke any object
674 destructors which have to be invoked when we exit the scopes which
675 are exited by the goto.
677 Value is nonzero if a fixup is made. */
/* NOTE(review): scraped listing -- blank/brace-only lines were dropped
   and original line numbers are fused onto each line; the nesting
   below follows the surviving comments.  */
679 static int
680 expand_fixup (tree tree_label, rtx rtl_label, rtx last_insn)
682 struct nesting *block, *end_block;
684 /* See if we can recognize which block the label will be output in.
685 This is possible in some very common cases.
686 If we succeed, set END_BLOCK to that block.
687 Otherwise, set it to 0. */
689 if (cond_stack
690 && (rtl_label == cond_stack->data.cond.endif_label
691 || rtl_label == cond_stack->data.cond.next_label))
692 end_block = cond_stack;
693 else
694 end_block = 0;
696 /* Now set END_BLOCK to the binding level to which we will return. */
698 if (end_block)
700 struct nesting *next_block = end_block->all;
701 block = block_stack;
703 /* First see if the END_BLOCK is inside the innermost binding level.
704 If so, then no cleanups or stack levels are relevant. */
705 while (next_block && next_block != block)
706 next_block = next_block->all;
708 if (next_block)
709 return 0;
711 /* Otherwise, set END_BLOCK to the innermost binding level
712 which is outside the relevant control-structure nesting. */
713 next_block = block_stack->next;
714 for (block = block_stack; block != end_block; block = block->all)
715 if (block == next_block)
716 next_block = next_block->next;
717 end_block = next_block;
720 /* Does any containing block have a stack level or cleanups?
721 If not, no fixup is needed, and that is the normal case
722 (the only case, for standard C). */
723 for (block = block_stack; block != end_block; block = block->next)
724 if (block->data.block.stack_level != 0
725 || block->data.block.cleanups != 0)
726 break;
728 if (block != end_block)
730 /* Ok, a fixup is needed. Add a fixup to the list of such. */
731 struct goto_fixup *fixup = ggc_alloc (sizeof (struct goto_fixup));
732 /* In case an old stack level is restored, make sure that comes
733 after any pending stack adjust. */
734 /* ?? If the fixup isn't to come at the present position,
735 doing the stack adjust here isn't useful. Doing it with our
736 settings at that location isn't useful either. Let's hope
737 someone does it! */
738 if (last_insn == 0)
739 do_pending_stack_adjust ();
740 fixup->target = tree_label;
741 fixup->target_rtl = rtl_label;
743 /* Create a BLOCK node and a corresponding matched set of
744 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
745 this point. The notes will encapsulate any and all fixup
746 code which we might later insert at this point in the insn
747 stream. Also, the BLOCK node will be the parent (i.e. the
748 `SUPERBLOCK') of any other BLOCK nodes which we might create
749 later on when we are expanding the fixup code.
751 Note that optimization passes might move the *_BLOCK notes away,
752 so we use a NOTE_INSN_DELETED as a placeholder. */
/* An inner scope begins here: `tree block' below SHADOWS the outer
   `struct nesting *block' until its scope closes.  */
755 rtx original_before_jump
756 = last_insn ? last_insn : get_last_insn ();
757 rtx start;
758 rtx end;
759 tree block;
761 block = make_node (BLOCK);
762 TREE_USED (block) = 1;
764 if (!cfun->x_whole_function_mode_p)
765 lang_hooks.decls.insert_block (block);
766 else
768 BLOCK_CHAIN (block)
769 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
770 BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
771 = block;
774 start_sequence ();
775 start = emit_note (NOTE_INSN_BLOCK_BEG);
776 if (cfun->x_whole_function_mode_p)
777 NOTE_BLOCK (start) = block;
778 fixup->before_jump = emit_note (NOTE_INSN_DELETED);
779 end = emit_note (NOTE_INSN_BLOCK_END);
780 if (cfun->x_whole_function_mode_p)
781 NOTE_BLOCK (end) = block;
782 fixup->context = block;
783 end_sequence ();
784 emit_insn_after (start, original_before_jump);
/* Back in the outer scope: `block' is again the struct nesting
   pointer (note the ->data.block accesses below).  */
787 fixup->block_start_count = current_block_start_count;
788 fixup->stack_level = 0;
789 fixup->cleanup_list_list
790 = ((block->data.block.outer_cleanups
791 || block->data.block.cleanups)
792 ? tree_cons (NULL_TREE, block->data.block.cleanups,
793 block->data.block.outer_cleanups)
794 : 0);
795 fixup->next = goto_fixup_chain;
796 goto_fixup_chain = fixup;
/* Nonzero return means a fixup was recorded.  */
799 return block != 0;
802 /* Expand any needed fixups in the outputmost binding level of the
803 function. FIRST_INSN is the first insn in the function. */
805 void
806 expand_fixups (rtx first_insn)
808 fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
811 /* When exiting a binding contour, process all pending gotos requiring fixups.
812 THISBLOCK is the structure that describes the block being exited.
813 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
814 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
815 FIRST_INSN is the insn that began this contour.
817 Gotos that jump out of this contour must restore the
818 stack level and do the cleanups before actually jumping.
820 DONT_JUMP_IN positive means report error if there is a jump into this
821 contour from before the beginning of the contour. This is also done if
822 STACK_LEVEL is nonzero unless DONT_JUMP_IN is negative. */
824 static void
825 fixup_gotos (struct nesting *thisblock, rtx stack_level,
826 tree cleanup_list, rtx first_insn, int dont_jump_in)
828 struct goto_fixup *f, *prev;
830 /* F is the fixup we are considering; PREV is the previous one. */
831 /* We run this loop in two passes so that cleanups of exited blocks
832 are run first, and blocks that are exited are marked so
833 afterwards. */
835 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
837 /* Test for a fixup that is inactive because it is already handled. */
838 if (f->before_jump == 0)
840 /* Delete inactive fixup from the chain, if that is easy to do. */
841 if (prev != 0)
842 prev->next = f->next;
844 /* Has this fixup's target label been defined?
845 If so, we can finalize it. */
846 else if (PREV_INSN (f->target_rtl) != 0)
848 rtx cleanup_insns;
850 /* If this fixup jumped into this contour from before the beginning
851 of this contour, report an error. This code used to use
852 the first non-label insn after f->target_rtl, but that's
853 wrong since such can be added, by things like put_var_into_stack
854 and have INSN_UIDs that are out of the range of the block. */
855 /* ??? Bug: this does not detect jumping in through intermediate
856 blocks that have stack levels or cleanups.
857 It detects only a problem with the innermost block
858 around the label. */
859 if (f->target != 0
860 && (dont_jump_in > 0 || (dont_jump_in == 0 && stack_level)
861 || cleanup_list)
862 && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
863 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
864 && ! DECL_ERROR_ISSUED (f->target))
866 error ("%Jlabel '%D' used before containing binding contour",
867 f->target, f->target);
868 /* Prevent multiple errors for one label. */
869 DECL_ERROR_ISSUED (f->target) = 1;
872 /* We will expand the cleanups into a sequence of their own and
873 then later on we will attach this new sequence to the insn
874 stream just ahead of the actual jump insn. */
876 start_sequence ();
878 /* Temporarily restore the lexical context where we will
879 logically be inserting the fixup code. We do this for the
880 sake of getting the debugging information right. */
882 lang_hooks.decls.pushlevel (0);
883 lang_hooks.decls.set_block (f->context);
885 /* Expand the cleanups for blocks this jump exits. */
886 if (f->cleanup_list_list)
888 tree lists;
889 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
890 /* Marked elements correspond to blocks that have been closed.
891 Do their cleanups. */
892 if (TREE_ADDRESSABLE (lists)
893 && TREE_VALUE (lists) != 0)
895 expand_cleanups (TREE_VALUE (lists), 1, 1);
896 /* Pop any pushes done in the cleanups,
897 in case function is about to return. */
898 do_pending_stack_adjust ();
902 /* Restore stack level for the biggest contour that this
903 jump jumps out of. */
904 if (f->stack_level
905 && ! (f->target_rtl == return_label
906 && ((TREE_CODE (TREE_TYPE (current_function_decl))
907 == FUNCTION_TYPE)
908 && (TYPE_RETURNS_STACK_DEPRESSED
909 (TREE_TYPE (current_function_decl))))))
910 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
912 /* Finish up the sequence containing the insns which implement the
913 necessary cleanups, and then attach that whole sequence to the
914 insn stream just ahead of the actual jump insn. Attaching it
915 at that point insures that any cleanups which are in fact
916 implicit C++ object destructions (which must be executed upon
917 leaving the block) appear (to the debugger) to be taking place
918 in an area of the generated code where the object(s) being
919 destructed are still "in scope". */
921 cleanup_insns = get_insns ();
922 lang_hooks.decls.poplevel (1, 0, 0);
924 end_sequence ();
925 emit_insn_after (cleanup_insns, f->before_jump);
927 f->before_jump = 0;
931 /* For any still-undefined labels, do the cleanups for this block now.
932 We must do this now since items in the cleanup list may go out
933 of scope when the block ends. */
934 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
935 if (f->before_jump != 0
936 && PREV_INSN (f->target_rtl) == 0
937 /* Label has still not appeared. If we are exiting a block with
938 a stack level to restore, that started before the fixup,
939 mark this stack level as needing restoration
940 when the fixup is later finalized. */
941 && thisblock != 0
942 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
943 means the label is undefined. That's erroneous, but possible. */
944 && (thisblock->data.block.block_start_count
945 <= f->block_start_count))
947 tree lists = f->cleanup_list_list;
948 rtx cleanup_insns;
950 for (; lists; lists = TREE_CHAIN (lists))
951 /* If the following elt. corresponds to our containing block
952 then the elt. must be for this block. */
953 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
955 start_sequence ();
956 lang_hooks.decls.pushlevel (0);
957 lang_hooks.decls.set_block (f->context);
958 expand_cleanups (TREE_VALUE (lists), 1, 1);
959 do_pending_stack_adjust ();
960 cleanup_insns = get_insns ();
961 lang_hooks.decls.poplevel (1, 0, 0);
962 end_sequence ();
963 if (cleanup_insns != 0)
964 f->before_jump
965 = emit_insn_after (cleanup_insns, f->before_jump);
967 f->cleanup_list_list = TREE_CHAIN (lists);
970 if (stack_level)
971 f->stack_level = stack_level;
/* Count how many times the character C appears in the NUL-terminated
   string S.  */
static int
n_occurrences (int c, const char *s)
{
  int count = 0;

  for (; *s != '\0'; s++)
    if (*s == c)
      count++;

  return count;
}
985 /* Generate RTL for an asm statement (explicit assembler code).
986 STRING is a STRING_CST node containing the assembler code text,
987 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
988 insn is volatile; don't optimize it. */
990 void
991 expand_asm (tree string, int vol)
993 rtx body;
995 if (TREE_CODE (string) == ADDR_EXPR)
996 string = TREE_OPERAND (string, 0);
998 body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));
1000 MEM_VOLATILE_P (body) = vol;
1002 emit_insn (body);
1004 clear_last_expr ();
1007 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
1008 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
1009 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
1010 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
1011 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
1012 constraint allows the use of a register operand. And, *IS_INOUT
1013 will be true if the operand is read-write, i.e., if it is used as
1014 an input as well as an output. If *CONSTRAINT_P is not in
1015 canonical form, it will be made canonical. (Note that `+' will be
1016 replaced with `=' as part of this process.)
1018 Returns TRUE if all went well; FALSE if an error occurred. */
1020 bool
1021 parse_output_constraint (const char **constraint_p, int operand_num,
1022 int ninputs, int noutputs, bool *allows_mem,
1023 bool *allows_reg, bool *is_inout)
1025 const char *constraint = *constraint_p;
1026 const char *p;
1028 /* Assume the constraint doesn't allow the use of either a register
1029 or memory. */
1030 *allows_mem = false;
1031 *allows_reg = false;
1033 /* Allow the `=' or `+' to not be at the beginning of the string,
1034 since it wasn't explicitly documented that way, and there is a
1035 large body of code that puts it last. Swap the character to
1036 the front, so as not to uglify any place else. */
1037 p = strchr (constraint, '=');
1038 if (!p)
1039 p = strchr (constraint, '+');
1041 /* If the string doesn't contain an `=', issue an error
1042 message. */
1043 if (!p)
1045 error ("output operand constraint lacks `='");
1046 return false;
1049 /* If the constraint begins with `+', then the operand is both read
1050 from and written to. */
1051 *is_inout = (*p == '+');
1053 /* Canonicalize the output constraint so that it begins with `='. */
1054 if (p != constraint || is_inout)
1056 char *buf;
1057 size_t c_len = strlen (constraint);
1059 if (p != constraint)
1060 warning ("output constraint `%c' for operand %d is not at the beginning",
1061 *p, operand_num);
1063 /* Make a copy of the constraint. */
1064 buf = alloca (c_len + 1);
1065 strcpy (buf, constraint);
1066 /* Swap the first character and the `=' or `+'. */
1067 buf[p - constraint] = buf[0];
1068 /* Make sure the first character is an `='. (Until we do this,
1069 it might be a `+'.) */
1070 buf[0] = '=';
1071 /* Replace the constraint with the canonicalized string. */
1072 *constraint_p = ggc_alloc_string (buf, c_len);
1073 constraint = *constraint_p;
1076 /* Loop through the constraint string. */
1077 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
1078 switch (*p)
1080 case '+':
1081 case '=':
1082 error ("operand constraint contains incorrectly positioned '+' or '='");
1083 return false;
1085 case '%':
1086 if (operand_num + 1 == ninputs + noutputs)
1088 error ("`%%' constraint used with last operand");
1089 return false;
1091 break;
1093 case 'V': case 'm': case 'o':
1094 *allows_mem = true;
1095 break;
1097 case '?': case '!': case '*': case '&': case '#':
1098 case 'E': case 'F': case 'G': case 'H':
1099 case 's': case 'i': case 'n':
1100 case 'I': case 'J': case 'K': case 'L': case 'M':
1101 case 'N': case 'O': case 'P': case ',':
1102 break;
1104 case '0': case '1': case '2': case '3': case '4':
1105 case '5': case '6': case '7': case '8': case '9':
1106 case '[':
1107 error ("matching constraint not valid in output operand");
1108 return false;
1110 case '<': case '>':
1111 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1112 excepting those that expand_call created. So match memory
1113 and hope. */
1114 *allows_mem = true;
1115 break;
1117 case 'g': case 'X':
1118 *allows_reg = true;
1119 *allows_mem = true;
1120 break;
1122 case 'p': case 'r':
1123 *allows_reg = true;
1124 break;
1126 default:
1127 if (!ISALPHA (*p))
1128 break;
1129 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
1130 *allows_reg = true;
1131 #ifdef EXTRA_CONSTRAINT_STR
1132 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
1133 *allows_reg = true;
1134 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
1135 *allows_mem = true;
1136 else
1138 /* Otherwise we can't assume anything about the nature of
1139 the constraint except that it isn't purely registers.
1140 Treat it like "g" and hope for the best. */
1141 *allows_reg = true;
1142 *allows_mem = true;
1144 #endif
1145 break;
1148 return true;
1151 /* Similar, but for input constraints. */
1153 bool
1154 parse_input_constraint (const char **constraint_p, int input_num,
1155 int ninputs, int noutputs, int ninout,
1156 const char * const * constraints,
1157 bool *allows_mem, bool *allows_reg)
1159 const char *constraint = *constraint_p;
1160 const char *orig_constraint = constraint;
1161 size_t c_len = strlen (constraint);
1162 size_t j;
1163 bool saw_match = false;
1165 /* Assume the constraint doesn't allow the use of either
1166 a register or memory. */
1167 *allows_mem = false;
1168 *allows_reg = false;
1170 /* Make sure constraint has neither `=', `+', nor '&'. */
1172 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
1173 switch (constraint[j])
1175 case '+': case '=': case '&':
1176 if (constraint == orig_constraint)
1178 error ("input operand constraint contains `%c'", constraint[j]);
1179 return false;
1181 break;
1183 case '%':
1184 if (constraint == orig_constraint
1185 && input_num + 1 == ninputs - ninout)
1187 error ("`%%' constraint used with last operand");
1188 return false;
1190 break;
1192 case 'V': case 'm': case 'o':
1193 *allows_mem = true;
1194 break;
1196 case '<': case '>':
1197 case '?': case '!': case '*': case '#':
1198 case 'E': case 'F': case 'G': case 'H':
1199 case 's': case 'i': case 'n':
1200 case 'I': case 'J': case 'K': case 'L': case 'M':
1201 case 'N': case 'O': case 'P': case ',':
1202 break;
1204 /* Whether or not a numeric constraint allows a register is
1205 decided by the matching constraint, and so there is no need
1206 to do anything special with them. We must handle them in
1207 the default case, so that we don't unnecessarily force
1208 operands to memory. */
1209 case '0': case '1': case '2': case '3': case '4':
1210 case '5': case '6': case '7': case '8': case '9':
1212 char *end;
1213 unsigned long match;
1215 saw_match = true;
1217 match = strtoul (constraint + j, &end, 10);
1218 if (match >= (unsigned long) noutputs)
1220 error ("matching constraint references invalid operand number");
1221 return false;
1224 /* Try and find the real constraint for this dup. Only do this
1225 if the matching constraint is the only alternative. */
1226 if (*end == '\0'
1227 && (j == 0 || (j == 1 && constraint[0] == '%')))
1229 constraint = constraints[match];
1230 *constraint_p = constraint;
1231 c_len = strlen (constraint);
1232 j = 0;
1233 /* ??? At the end of the loop, we will skip the first part of
1234 the matched constraint. This assumes not only that the
1235 other constraint is an output constraint, but also that
1236 the '=' or '+' come first. */
1237 break;
1239 else
1240 j = end - constraint;
1241 /* Anticipate increment at end of loop. */
1242 j--;
1244 /* Fall through. */
1246 case 'p': case 'r':
1247 *allows_reg = true;
1248 break;
1250 case 'g': case 'X':
1251 *allows_reg = true;
1252 *allows_mem = true;
1253 break;
1255 default:
1256 if (! ISALPHA (constraint[j]))
1258 error ("invalid punctuation `%c' in constraint", constraint[j]);
1259 return false;
1261 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
1262 != NO_REGS)
1263 *allows_reg = true;
1264 #ifdef EXTRA_CONSTRAINT_STR
1265 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
1266 *allows_reg = true;
1267 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
1268 *allows_mem = true;
1269 else
1271 /* Otherwise we can't assume anything about the nature of
1272 the constraint except that it isn't purely registers.
1273 Treat it like "g" and hope for the best. */
1274 *allows_reg = true;
1275 *allows_mem = true;
1277 #endif
1278 break;
1281 if (saw_match && !*allows_reg)
1282 warning ("matching constraint does not allow a register");
1284 return true;
1287 /* INPUT is one of the input operands from EXPR, an ASM_EXPR. Returns true
1288 if it is an operand which must be passed in memory (i.e. an "m"
1289 constraint), false otherwise. */
1291 bool
1292 asm_op_is_mem_input (tree input, tree expr)
1294 const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (input)));
1295 tree outputs = ASM_OUTPUTS (expr);
1296 int noutputs = list_length (outputs);
1297 const char **constraints
1298 = (const char **) alloca ((noutputs) * sizeof (const char *));
1299 int i = 0;
1300 bool allows_mem, allows_reg;
1301 tree t;
1303 /* Collect output constraints. */
1304 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
1305 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1307 /* We pass 0 for input_num, ninputs and ninout; they are only used for
1308 error checking which will be done at expand time. */
1309 parse_input_constraint (&constraint, 0, 0, noutputs, 0, constraints,
1310 &allows_mem, &allows_reg);
1311 return (!allows_reg && allows_mem);
1314 /* Check for overlap between registers marked in CLOBBERED_REGS and
1315 anything inappropriate in DECL. Emit error and return TRUE for error,
1316 FALSE for ok. */
1318 static bool
1319 decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
1321 /* Conflicts between asm-declared register variables and the clobber
1322 list are not allowed. */
1323 if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
1324 && DECL_REGISTER (decl)
1325 && REG_P (DECL_RTL (decl))
1326 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
1328 rtx reg = DECL_RTL (decl);
1329 unsigned int regno;
1331 for (regno = REGNO (reg);
1332 regno < (REGNO (reg)
1333 + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
1334 regno++)
1335 if (TEST_HARD_REG_BIT (clobbered_regs, regno))
1337 error ("asm-specifier for variable `%s' conflicts with asm clobber list",
1338 IDENTIFIER_POINTER (DECL_NAME (decl)));
1340 /* Reset registerness to stop multiple errors emitted for a
1341 single variable. */
1342 DECL_REGISTER (decl) = 0;
1343 return true;
1346 return false;
1349 /* Generate RTL for an asm statement with arguments.
1350 STRING is the instruction template.
1351 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1352 Each output or input has an expression in the TREE_VALUE and
1353 and a tree list in TREE_PURPOSE which in turn contains a constraint
1354 name in TREE_VALUE (or NULL_TREE) and a constraint string
1355 in TREE_PURPOSE.
1356 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1357 that is clobbered by this insn.
1359 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1360 Some elements of OUTPUTS may be replaced with trees representing temporary
1361 values. The caller should copy those temporary values to the originally
1362 specified lvalues.
1364 VOL nonzero means the insn is volatile; don't optimize it. */
1366 void
1367 expand_asm_operands (tree string, tree outputs, tree inputs,
1368 tree clobbers, int vol, location_t locus)
1370 rtvec argvec, constraintvec;
1371 rtx body;
1372 int ninputs = list_length (inputs);
1373 int noutputs = list_length (outputs);
1374 int ninout;
1375 int nclobbers;
1376 HARD_REG_SET clobbered_regs;
1377 int clobber_conflict_found = 0;
1378 tree tail;
1379 tree t;
1380 int i;
1381 /* Vector of RTX's of evaluated output operands. */
1382 rtx *output_rtx = alloca (noutputs * sizeof (rtx));
1383 int *inout_opnum = alloca (noutputs * sizeof (int));
1384 rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
1385 enum machine_mode *inout_mode
1386 = alloca (noutputs * sizeof (enum machine_mode));
1387 const char **constraints
1388 = alloca ((noutputs + ninputs) * sizeof (const char *));
1389 int old_generating_concat_p = generating_concat_p;
1391 /* An ASM with no outputs needs to be treated as volatile, for now. */
1392 if (noutputs == 0)
1393 vol = 1;
1395 if (! check_operand_nalternatives (outputs, inputs))
1396 return;
1398 string = resolve_asm_operand_names (string, outputs, inputs);
1400 /* Collect constraints. */
1401 i = 0;
1402 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
1403 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1404 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
1405 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1407 /* Sometimes we wish to automatically clobber registers across an asm.
1408 Case in point is when the i386 backend moved from cc0 to a hard reg --
1409 maintaining source-level compatibility means automatically clobbering
1410 the flags register. */
1411 clobbers = targetm.md_asm_clobbers (clobbers);
1413 /* Count the number of meaningful clobbered registers, ignoring what
1414 we would ignore later. */
1415 nclobbers = 0;
1416 CLEAR_HARD_REG_SET (clobbered_regs);
1417 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1419 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1421 i = decode_reg_name (regname);
1422 if (i >= 0 || i == -4)
1423 ++nclobbers;
1424 else if (i == -2)
1425 error ("unknown register name `%s' in `asm'", regname);
1427 /* Mark clobbered registers. */
1428 if (i >= 0)
1430 /* Clobbering the PIC register is an error */
1431 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
1433 error ("PIC register `%s' clobbered in `asm'", regname);
1434 return;
1437 SET_HARD_REG_BIT (clobbered_regs, i);
1441 clear_last_expr ();
1443 /* First pass over inputs and outputs checks validity and sets
1444 mark_addressable if needed. */
1446 ninout = 0;
1447 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1449 tree val = TREE_VALUE (tail);
1450 tree type = TREE_TYPE (val);
1451 const char *constraint;
1452 bool is_inout;
1453 bool allows_reg;
1454 bool allows_mem;
1456 /* If there's an erroneous arg, emit no insn. */
1457 if (type == error_mark_node)
1458 return;
1460 /* Try to parse the output constraint. If that fails, there's
1461 no point in going further. */
1462 constraint = constraints[i];
1463 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1464 &allows_mem, &allows_reg, &is_inout))
1465 return;
1467 if (! allows_reg
1468 && (allows_mem
1469 || is_inout
1470 || (DECL_P (val)
1471 && GET_CODE (DECL_RTL (val)) == REG
1472 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
1473 lang_hooks.mark_addressable (val);
1475 if (is_inout)
1476 ninout++;
1479 ninputs += ninout;
1480 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1482 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1483 return;
1486 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1488 bool allows_reg, allows_mem;
1489 const char *constraint;
1491 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1492 would get VOIDmode and that could cause a crash in reload. */
1493 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1494 return;
1496 constraint = constraints[i + noutputs];
1497 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1498 constraints, &allows_mem, &allows_reg))
1499 return;
1501 if (! allows_reg && allows_mem)
1502 lang_hooks.mark_addressable (TREE_VALUE (tail));
1505 /* Second pass evaluates arguments. */
1507 ninout = 0;
1508 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1510 tree val = TREE_VALUE (tail);
1511 tree type = TREE_TYPE (val);
1512 bool is_inout;
1513 bool allows_reg;
1514 bool allows_mem;
1515 rtx op;
1517 if (!parse_output_constraint (&constraints[i], i, ninputs,
1518 noutputs, &allows_mem, &allows_reg,
1519 &is_inout))
1520 abort ();
1522 /* If an output operand is not a decl or indirect ref and our constraint
1523 allows a register, make a temporary to act as an intermediate.
1524 Make the asm insn write into that, then our caller will copy it to
1525 the real output operand. Likewise for promoted variables. */
1527 generating_concat_p = 0;
1529 real_output_rtx[i] = NULL_RTX;
1530 if ((TREE_CODE (val) == INDIRECT_REF
1531 && allows_mem)
1532 || (DECL_P (val)
1533 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1534 && ! (GET_CODE (DECL_RTL (val)) == REG
1535 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1536 || ! allows_reg
1537 || is_inout)
1539 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1540 if (GET_CODE (op) == MEM)
1541 op = validize_mem (op);
1543 if (! allows_reg && GET_CODE (op) != MEM)
1544 error ("output number %d not directly addressable", i);
1545 if ((! allows_mem && GET_CODE (op) == MEM)
1546 || GET_CODE (op) == CONCAT)
1548 real_output_rtx[i] = protect_from_queue (op, 1);
1549 op = gen_reg_rtx (GET_MODE (op));
1550 if (is_inout)
1551 emit_move_insn (op, real_output_rtx[i]);
1554 else
1556 op = assign_temp (type, 0, 0, 1);
1557 op = validize_mem (op);
1558 TREE_VALUE (tail) = make_tree (type, op);
1560 output_rtx[i] = op;
1562 generating_concat_p = old_generating_concat_p;
1564 if (is_inout)
1566 inout_mode[ninout] = TYPE_MODE (type);
1567 inout_opnum[ninout++] = i;
1570 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1571 clobber_conflict_found = 1;
1574 /* Make vectors for the expression-rtx, constraint strings,
1575 and named operands. */
1577 argvec = rtvec_alloc (ninputs);
1578 constraintvec = rtvec_alloc (ninputs);
1580 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1581 : GET_MODE (output_rtx[0])),
1582 TREE_STRING_POINTER (string),
1583 empty_string, 0, argvec, constraintvec,
1584 locus.file, locus.line);
1586 MEM_VOLATILE_P (body) = vol;
1588 /* Eval the inputs and put them into ARGVEC.
1589 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1591 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1593 bool allows_reg, allows_mem;
1594 const char *constraint;
1595 tree val, type;
1596 rtx op;
1598 constraint = constraints[i + noutputs];
1599 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1600 constraints, &allows_mem, &allows_reg))
1601 abort ();
1603 generating_concat_p = 0;
1605 val = TREE_VALUE (tail);
1606 type = TREE_TYPE (val);
1607 op = expand_expr (val, NULL_RTX, VOIDmode,
1608 (allows_mem && !allows_reg
1609 ? EXPAND_MEMORY : EXPAND_NORMAL));
1611 /* Never pass a CONCAT to an ASM. */
1612 if (GET_CODE (op) == CONCAT)
1613 op = force_reg (GET_MODE (op), op);
1614 else if (GET_CODE (op) == MEM)
1615 op = validize_mem (op);
1617 if (asm_operand_ok (op, constraint) <= 0)
1619 if (allows_reg)
1620 op = force_reg (TYPE_MODE (type), op);
1621 else if (!allows_mem)
1622 warning ("asm operand %d probably doesn't match constraints",
1623 i + noutputs);
1624 else if (GET_CODE (op) == MEM)
1626 /* We won't recognize either volatile memory or memory
1627 with a queued address as available a memory_operand
1628 at this point. Ignore it: clearly this *is* a memory. */
1630 else
1632 warning ("use of memory input without lvalue in "
1633 "asm operand %d is deprecated", i + noutputs);
1635 if (CONSTANT_P (op))
1637 rtx mem = force_const_mem (TYPE_MODE (type), op);
1638 if (mem)
1639 op = validize_mem (mem);
1640 else
1641 op = force_reg (TYPE_MODE (type), op);
1643 if (GET_CODE (op) == REG
1644 || GET_CODE (op) == SUBREG
1645 || GET_CODE (op) == ADDRESSOF
1646 || GET_CODE (op) == CONCAT)
1648 tree qual_type = build_qualified_type (type,
1649 (TYPE_QUALS (type)
1650 | TYPE_QUAL_CONST));
1651 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1652 memloc = validize_mem (memloc);
1653 emit_move_insn (memloc, op);
1654 op = memloc;
1659 generating_concat_p = old_generating_concat_p;
1660 ASM_OPERANDS_INPUT (body, i) = op;
1662 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1663 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1665 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1666 clobber_conflict_found = 1;
1669 /* Protect all the operands from the queue now that they have all been
1670 evaluated. */
1672 generating_concat_p = 0;
1674 for (i = 0; i < ninputs - ninout; i++)
1675 ASM_OPERANDS_INPUT (body, i)
1676 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1678 for (i = 0; i < noutputs; i++)
1679 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1681 /* For in-out operands, copy output rtx to input rtx. */
1682 for (i = 0; i < ninout; i++)
1684 int j = inout_opnum[i];
1685 char buffer[16];
1687 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1688 = output_rtx[j];
1690 sprintf (buffer, "%d", j);
1691 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1692 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
1695 generating_concat_p = old_generating_concat_p;
1697 /* Now, for each output, construct an rtx
1698 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1699 ARGVEC CONSTRAINTS OPNAMES))
1700 If there is more than one, put them inside a PARALLEL. */
1702 if (noutputs == 1 && nclobbers == 0)
1704 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1705 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1708 else if (noutputs == 0 && nclobbers == 0)
1710 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1711 emit_insn (body);
1714 else
1716 rtx obody = body;
1717 int num = noutputs;
1719 if (num == 0)
1720 num = 1;
1722 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1724 /* For each output operand, store a SET. */
1725 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1727 XVECEXP (body, 0, i)
1728 = gen_rtx_SET (VOIDmode,
1729 output_rtx[i],
1730 gen_rtx_ASM_OPERANDS
1731 (GET_MODE (output_rtx[i]),
1732 TREE_STRING_POINTER (string),
1733 constraints[i], i, argvec, constraintvec,
1734 locus.file, locus.line));
1736 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1739 /* If there are no outputs (but there are some clobbers)
1740 store the bare ASM_OPERANDS into the PARALLEL. */
1742 if (i == 0)
1743 XVECEXP (body, 0, i++) = obody;
1745 /* Store (clobber REG) for each clobbered register specified. */
1747 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1749 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1750 int j = decode_reg_name (regname);
1751 rtx clobbered_reg;
1753 if (j < 0)
1755 if (j == -3) /* `cc', which is not a register */
1756 continue;
1758 if (j == -4) /* `memory', don't cache memory across asm */
1760 XVECEXP (body, 0, i++)
1761 = gen_rtx_CLOBBER (VOIDmode,
1762 gen_rtx_MEM
1763 (BLKmode,
1764 gen_rtx_SCRATCH (VOIDmode)));
1765 continue;
1768 /* Ignore unknown register, error already signaled. */
1769 continue;
1772 /* Use QImode since that's guaranteed to clobber just one reg. */
1773 clobbered_reg = gen_rtx_REG (QImode, j);
1775 /* Do sanity check for overlap between clobbers and respectively
1776 input and outputs that hasn't been handled. Such overlap
1777 should have been detected and reported above. */
1778 if (!clobber_conflict_found)
1780 int opno;
1782 /* We test the old body (obody) contents to avoid tripping
1783 over the under-construction body. */
1784 for (opno = 0; opno < noutputs; opno++)
1785 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1786 internal_error ("asm clobber conflict with output operand");
1788 for (opno = 0; opno < ninputs - ninout; opno++)
1789 if (reg_overlap_mentioned_p (clobbered_reg,
1790 ASM_OPERANDS_INPUT (obody, opno)))
1791 internal_error ("asm clobber conflict with input operand");
1794 XVECEXP (body, 0, i++)
1795 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1798 emit_insn (body);
1801 /* For any outputs that needed reloading into registers, spill them
1802 back to where they belong. */
1803 for (i = 0; i < noutputs; ++i)
1804 if (real_output_rtx[i])
1805 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1807 free_temp_slots ();
1810 void
1811 expand_asm_expr (tree exp)
1813 int noutputs, i;
1814 tree outputs, tail;
1815 tree *o;
1817 if (ASM_INPUT_P (exp))
1819 expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
1820 return;
1823 outputs = ASM_OUTPUTS (exp);
1824 noutputs = list_length (outputs);
1825 /* o[I] is the place that output number I should be written. */
1826 o = (tree *) alloca (noutputs * sizeof (tree));
1828 /* Record the contents of OUTPUTS before it is modified. */
1829 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1830 o[i] = TREE_VALUE (tail);
1832 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1833 OUTPUTS some trees for where the values were actually stored. */
1834 expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1835 ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1836 input_location);
1838 /* Copy all the intermediate outputs into the specified outputs. */
1839 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1841 if (o[i] != TREE_VALUE (tail))
1843 expand_assignment (o[i], TREE_VALUE (tail), 0);
1844 free_temp_slots ();
1846 /* Restore the original value so that it's correct the next
1847 time we expand this function. */
1848 TREE_VALUE (tail) = o[i];
1852 /* Those MODIFY_EXPRs could do autoincrements. */
1853 emit_queue ();
1856 /* A subroutine of expand_asm_operands. Check that all operands have
1857 the same number of alternatives. Return true if so. */
1859 static bool
1860 check_operand_nalternatives (tree outputs, tree inputs)
1862 if (outputs || inputs)
1864 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1865 int nalternatives
1866 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1867 tree next = inputs;
1869 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1871 error ("too many alternatives in `asm'");
1872 return false;
1875 tmp = outputs;
1876 while (tmp)
1878 const char *constraint
1879 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1881 if (n_occurrences (',', constraint) != nalternatives)
1883 error ("operand constraints for `asm' differ in number of alternatives");
1884 return false;
1887 if (TREE_CHAIN (tmp))
1888 tmp = TREE_CHAIN (tmp);
1889 else
1890 tmp = next, next = 0;
1894 return true;
1897 /* A subroutine of expand_asm_operands. Check that all operand names
1898 are unique. Return true if so. We rely on the fact that these names
1899 are identifiers, and so have been canonicalized by get_identifier,
1900 so all we need are pointer comparisons. */
1902 static bool
1903 check_unique_operand_names (tree outputs, tree inputs)
1905 tree i, j;
1907 for (i = outputs; i ; i = TREE_CHAIN (i))
1909 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1910 if (! i_name)
1911 continue;
1913 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1914 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1915 goto failure;
1918 for (i = inputs; i ; i = TREE_CHAIN (i))
1920 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1921 if (! i_name)
1922 continue;
1924 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1925 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1926 goto failure;
1927 for (j = outputs; j ; j = TREE_CHAIN (j))
1928 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1929 goto failure;
1932 return true;
1934 failure:
1935 error ("duplicate asm operand name '%s'",
1936 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1937 return false;
1940 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1941 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1942 STRING and in the constraints to those numbers. */
1944 tree
1945 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1947 char *buffer;
1948 char *p;
1949 const char *c;
1950 tree t;
1952 check_unique_operand_names (outputs, inputs);
1954 /* Substitute [<name>] in input constraint strings. There should be no
1955 named operands in output constraints. */
1956 for (t = inputs; t ; t = TREE_CHAIN (t))
1958 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1959 if (strchr (c, '[') != NULL)
1961 p = buffer = xstrdup (c);
1962 while ((p = strchr (p, '[')) != NULL)
1963 p = resolve_operand_name_1 (p, outputs, inputs);
1964 TREE_VALUE (TREE_PURPOSE (t))
1965 = build_string (strlen (buffer), buffer);
1966 free (buffer);
1970 /* Now check for any needed substitutions in the template. */
1971 c = TREE_STRING_POINTER (string);
1972 while ((c = strchr (c, '%')) != NULL)
1974 if (c[1] == '[')
1975 break;
1976 else if (ISALPHA (c[1]) && c[2] == '[')
1977 break;
1978 else
1980 c += 1;
1981 continue;
1985 if (c)
1987 /* OK, we need to make a copy so we can perform the substitutions.
1988 Assume that we will not need extra space--we get to remove '['
1989 and ']', which means we cannot have a problem until we have more
1990 than 999 operands. */
1991 buffer = xstrdup (TREE_STRING_POINTER (string));
1992 p = buffer + (c - TREE_STRING_POINTER (string));
1994 while ((p = strchr (p, '%')) != NULL)
1996 if (p[1] == '[')
1997 p += 1;
1998 else if (ISALPHA (p[1]) && p[2] == '[')
1999 p += 2;
2000 else
2002 p += 1;
2003 continue;
2006 p = resolve_operand_name_1 (p, outputs, inputs);
2009 string = build_string (strlen (buffer), buffer);
2010 free (buffer);
2013 return string;
2016 /* A subroutine of resolve_operand_names.  P points to the '[' for a
2017 potential named operand of the form [<name>].  In place, replace
2018 the name and brackets with a number.  Return a pointer to the
2019 balance of the string after substitution. */
2021 static char *
2022 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
2024 char *q;
2025 int op;
2026 tree t;
2027 size_t len;
2029 /* Collect the operand name. */
2030 q = strchr (p, ']');
2031 if (!q)
2033 error ("missing close brace for named operand");
2034 return strchr (p, '\0');
/* LEN is the length of the name, excluding both brackets.  */
2036 len = q - p - 1;
2038 /* Resolve the name to a number. */
/* Outputs are numbered first, then inputs, matching asm operand order.  */
2039 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2041 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2042 if (name)
2044 const char *c = TREE_STRING_POINTER (name);
2045 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2046 goto found;
2049 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2051 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2052 if (name)
2054 const char *c = TREE_STRING_POINTER (name);
2055 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2056 goto found;
/* No operand matched: NUL-terminate the name for the diagnostic, then
   fall into the FOUND code with operand 0 so compilation can proceed.  */
2060 *q = '\0';
2061 error ("undefined named operand '%s'", p + 1);
2062 op = 0;
2063 found:
2065 /* Replace the name with the number.  Unfortunately, not all libraries
2066 get the return value of sprintf correct, so search for the end of the
2067 generated string by hand. */
2068 sprintf (p, "%d", op);
2069 p = strchr (p, '\0');
2071 /* Verify the no extra buffer space assumption. */
2072 if (p > q)
2073 abort ();
2075 /* Shift the rest of the buffer down to fill the gap. */
2076 memmove (p, q + 1, strlen (q + 1) + 1);
2078 return p;
2081 /* Generate RTL to evaluate the expression EXP
2082 and remember it in case this is the VALUE in a ({... VALUE; }) constr.
2083 Provided just for backward-compatibility.  expand_expr_stmt_value()
2084 should be used for new code. */
2086 void
2087 expand_expr_stmt (tree exp)
/* -1: let expr_stmts_for_value decide whether the value is wanted;
   1: this may be the last statement of a ({...}) expression.  */
2089 expand_expr_stmt_value (exp, -1, 1);
2092 /* Generate RTL to evaluate the expression EXP.  WANT_VALUE tells
2093 whether to (1) save the value of the expression, (0) discard it or
2094 (-1) use expr_stmts_for_value to tell.  The use of -1 is
2095 deprecated, and retained only for backward compatibility. */
2097 void
2098 expand_expr_stmt_value (tree exp, int want_value, int maybe_last)
2100 rtx value;
2101 tree type;
2102 rtx alt_rtl = NULL;
2104 if (want_value == -1)
2105 want_value = expr_stmts_for_value != 0;
2107 /* If -Wextra, warn about statements with no side effects,
2108 except for an explicit cast to void (e.g. for assert()), and
2109 except for last statement in ({...}) where they may be useful. */
2110 if (! want_value
2111 && (expr_stmts_for_value == 0 || ! maybe_last)
2112 && exp != error_mark_node
2113 && warn_unused_value)
2115 if (TREE_SIDE_EFFECTS (exp))
2116 warn_if_unused_value (exp);
2117 else if (!VOID_TYPE_P (TREE_TYPE (exp)) && !TREE_NO_WARNING (exp))
2118 warning ("%Hstatement with no effect", &emit_locus);
2121 /* If EXP is of function type and we are expanding statements for
2122 value, convert it to pointer-to-function. */
2123 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2124 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2126 /* The call to `expand_expr' could cause last_expr_type and
2127 last_expr_value to get reset.  Therefore, we set last_expr_value
2128 and last_expr_type *after* calling expand_expr. */
2129 value = expand_expr_real (exp, want_value ? NULL_RTX : const0_rtx,
2130 VOIDmode, 0, &alt_rtl);
2131 type = TREE_TYPE (exp);
2133 /* If all we do is reference a volatile value in memory,
2134 copy it to a register to be sure it is actually touched. */
2135 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
/* NOTE(review): the VOIDmode arm's (empty) body line is not visible in
   this rendering -- presumably nothing need be done for a void value.  */
2137 if (TYPE_MODE (type) == VOIDmode)
2139 else if (TYPE_MODE (type) != BLKmode)
2140 value = copy_to_reg (value);
2141 else
2143 rtx lab = gen_label_rtx ();
2145 /* Compare the value with itself to reference it. */
2146 emit_cmp_and_jump_insns (value, value, EQ,
2147 expand_expr (TYPE_SIZE (type),
2148 NULL_RTX, VOIDmode, 0),
2149 BLKmode, 0, lab);
2150 emit_label (lab);
2154 /* If this expression is part of a ({...}) and is in memory, we may have
2155 to preserve temporaries. */
2156 preserve_temp_slots (value);
2158 /* Free any temporaries used to evaluate this expression.  Any temporary
2159 used as a result of this expression will already have been preserved
2160 above. */
2161 free_temp_slots ();
/* Record the value for a possible enclosing ({...}) expression.  */
2163 if (want_value)
2165 last_expr_value = value;
2166 last_expr_alt_rtl = alt_rtl;
2167 last_expr_type = type;
2170 emit_queue ();
2173 /* Warn if EXP contains any computations whose results are not used.
2174 Return 1 if a warning is printed; 0 otherwise. */
/* NOTE(review): TREE_USED presumably marks an expression already
   consumed or warned about; confirm against tree.h.  */
2177 warn_if_unused_value (tree exp)
2179 if (TREE_USED (exp))
2180 return 0;
2182 /* Don't warn about void constructs.  This includes casting to void,
2183 void function calls, and statement expressions with a final cast
2184 to void. */
2185 if (VOID_TYPE_P (TREE_TYPE (exp)))
2186 return 0;
2188 switch (TREE_CODE (exp))
/* These codes either have side effects by nature or are expansion
   artifacts; none warrant a warning.  */
2190 case PREINCREMENT_EXPR:
2191 case POSTINCREMENT_EXPR:
2192 case PREDECREMENT_EXPR:
2193 case POSTDECREMENT_EXPR:
2194 case MODIFY_EXPR:
2195 case INIT_EXPR:
2196 case TARGET_EXPR:
2197 case CALL_EXPR:
2198 case RTL_EXPR:
2199 case TRY_CATCH_EXPR:
2200 case WITH_CLEANUP_EXPR:
2201 case EXIT_EXPR:
2202 return 0;
2204 case BIND_EXPR:
2205 /* For a binding, warn if no side effect within it. */
2206 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2208 case SAVE_EXPR:
2209 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2211 case TRUTH_ORIF_EXPR:
2212 case TRUTH_ANDIF_EXPR:
2213 /* In && or ||, warn if 2nd operand has no side effect. */
2214 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2216 case COMPOUND_EXPR:
2217 if (TREE_NO_WARNING (exp))
2218 return 0;
2219 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2220 return 1;
2221 /* Let people do `(foo (), 0)' without a warning. */
2222 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2223 return 0;
2224 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2226 case NOP_EXPR:
2227 case CONVERT_EXPR:
2228 case NON_LVALUE_EXPR:
2229 /* Don't warn about conversions not explicit in the user's program. */
2230 if (TREE_NO_WARNING (exp))
2231 return 0;
2232 /* Assignment to a cast usually results in a cast of a modify.
2233 Don't complain about that.  There can be an arbitrary number of
2234 casts before the modify, so we must loop until we find the first
2235 non-cast expression and then test to see if that is a modify. */
2237 tree tem = TREE_OPERAND (exp, 0);
2239 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2240 tem = TREE_OPERAND (tem, 0);
2242 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2243 || TREE_CODE (tem) == CALL_EXPR)
2244 return 0;
2246 goto maybe_warn;
2248 case INDIRECT_REF:
2249 /* Don't warn about automatic dereferencing of references, since
2250 the user cannot control it. */
2251 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2252 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2253 /* Fall through. */
2255 default:
2256 /* Referencing a volatile value is a side effect, so don't warn. */
2257 if ((DECL_P (exp)
2258 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2259 && TREE_THIS_VOLATILE (exp))
2260 return 0;
2262 /* If this is an expression which has no operands, there is no value
2263 to be unused.  There are no such language-independent codes,
2264 but front ends may define such. */
2265 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2266 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2267 return 0;
/* Shared tail for the conversion and default cases.  */
2269 maybe_warn:
2270 /* If this is an expression with side effects, don't warn. */
2271 if (TREE_SIDE_EFFECTS (exp))
2272 return 0;
2274 warning ("%Hvalue computed is not used", &emit_locus);
2275 return 1;
2279 /* Clear out the memory of the last expression evaluated. */
2281 void
2282 clear_last_expr (void)
2284 last_expr_type = NULL_TREE;
2285 last_expr_value = NULL_RTX;
2286 last_expr_alt_rtl = NULL_RTX;
2289 /* Begin a statement-expression, i.e., a series of statements which
2290 may return a value.  Return the RTL_EXPR for this statement expr.
2291 The caller must save that value and pass it to
2292 expand_end_stmt_expr.  If HAS_SCOPE is nonzero, temporaries created
2293 in the statement-expression are deallocated at the end of the
2294 expression. */
2296 tree
2297 expand_start_stmt_expr (int has_scope)
2299 tree t;
2301 /* Make the RTL_EXPR node temporary, not momentary,
2302 so that rtl_expr_chain doesn't become garbage. */
2303 t = make_node (RTL_EXPR);
2304 do_pending_stack_adjust ();
2305 if (has_scope)
2306 start_sequence_for_rtl_expr (t);
2307 else
2308 start_sequence ();
/* NOTE(review): NO_DEFER_POP presumably suppresses deferred stack pops
   until the matching expand_end_stmt_expr's OK_DEFER_POP.  */
2309 NO_DEFER_POP;
/* Record that the statements expanded from here on may produce a value.  */
2310 expr_stmts_for_value++;
2311 return t;
2314 /* Restore the previous state at the end of a statement that returns a value.
2315 Returns a tree node representing the statement's value and the
2316 insns to compute the value.
2318 The nodes of that expression have been freed by now, so we cannot use them.
2319 But we don't want to do that anyway; the expression has already been
2320 evaluated and now we just want to use the value.  So generate a RTL_EXPR
2321 with the proper type and RTL value.
2323 If the last substatement was not an expression,
2324 return something with type `void'. */
2326 tree
2327 expand_end_stmt_expr (tree t)
2329 OK_DEFER_POP;
/* If no value-producing statement was seen, the whole construct is void.  */
2331 if (! last_expr_value || ! last_expr_type)
2333 last_expr_value = const0_rtx;
2334 last_expr_alt_rtl = NULL_RTX;
2335 last_expr_type = void_type_node;
2337 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2338 /* Remove any possible QUEUED. */
2339 last_expr_value = protect_from_queue (last_expr_value, 0);
2341 emit_queue ();
2343 TREE_TYPE (t) = last_expr_type;
2344 RTL_EXPR_RTL (t) = last_expr_value;
2345 RTL_EXPR_ALT_RTL (t) = last_expr_alt_rtl;
2346 RTL_EXPR_SEQUENCE (t) = get_insns ();
/* Keep T reachable so its RTL sequence is not treated as garbage.  */
2348 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2350 end_sequence ();
2352 /* Don't consider deleting this expr or containing exprs at tree level. */
2353 TREE_SIDE_EFFECTS (t) = 1;
2354 /* Propagate volatility of the actual RTL expr. */
2355 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2357 clear_last_expr ();
2358 expr_stmts_for_value--;
2360 return t;
2363 /* Generate RTL for the start of an if-then.  COND is the expression
2364 whose truth should be tested.
2366 If EXITFLAG is nonzero, this conditional is visible to
2367 `exit_something'. */
2369 void
2370 expand_start_cond (tree cond, int exitflag)
2372 struct nesting *thiscond = ALLOC_NESTING ();
2374 /* Make an entry on cond_stack for the cond we are entering. */
2376 thiscond->desc = COND_NESTING;
2377 thiscond->next = cond_stack;
2378 thiscond->all = nesting_stack;
2379 thiscond->depth = ++nesting_depth;
2380 thiscond->data.cond.next_label = gen_label_rtx ();
2381 /* Before we encounter an `else', we don't need a separate exit label
2382 unless there are supposed to be exit statements
2383 to exit this conditional. */
2384 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2385 thiscond->data.cond.endif_label = thiscond->exit_label;
2386 cond_stack = thiscond;
2387 nesting_stack = thiscond;
/* Fall through into the then-clause when COND is true; otherwise jump
   to next_label, the eventual elseif/else/endif point.  */
2389 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2392 /* Generate RTL between then-clause and the elseif-clause
2393 of an if-then-elseif-.... */
2395 void
2396 expand_start_elseif (tree cond)
2398 if (cond_stack->data.cond.endif_label == 0)
2399 cond_stack->data.cond.endif_label = gen_label_rtx ();
/* Terminate the previous then-clause by jumping to the endif, then
   resolve the previous false-target here, where the new test begins.  */
2400 emit_jump (cond_stack->data.cond.endif_label);
2401 emit_label (cond_stack->data.cond.next_label);
2402 cond_stack->data.cond.next_label = gen_label_rtx ();
2403 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2406 /* Generate RTL between the then-clause and the else-clause
2407 of an if-then-else. */
2409 void
2410 expand_start_else (void)
2412 if (cond_stack->data.cond.endif_label == 0)
2413 cond_stack->data.cond.endif_label = gen_label_rtx ();
/* Close off the then-clause and start the else-clause at the point the
   condition jumped to when false.  */
2415 emit_jump (cond_stack->data.cond.endif_label);
2416 emit_label (cond_stack->data.cond.next_label);
2417 cond_stack->data.cond.next_label = 0;  /* No more _else or _elseif calls. */
2420 /* After calling expand_start_else, turn this "else" into an "else if"
2421 by providing another condition. */
2423 void
2424 expand_elseif (tree cond)
/* Re-open a false-target (expand_start_else zeroed it) and test COND.  */
2426 cond_stack->data.cond.next_label = gen_label_rtx ();
2427 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2430 /* Generate RTL for the end of an if-then.
2431 Pop the record for it off of cond_stack. */
2433 void
2434 expand_end_cond (void)
2436 struct nesting *thiscond = cond_stack;
/* Flush any deferred stack adjustment before the join point.  */
2438 do_pending_stack_adjust ();
/* Either label may be absent: next_label is 0 after an else-clause,
   endif_label is 0 for a plain if with no else and no exits.  */
2439 if (thiscond->data.cond.next_label)
2440 emit_label (thiscond->data.cond.next_label);
2441 if (thiscond->data.cond.endif_label)
2442 emit_label (thiscond->data.cond.endif_label);
2444 POPSTACK (cond_stack);
2445 clear_last_expr ();
2448 /* Return nonzero if we should preserve sub-expressions as separate
2449 pseudos. We never do so if we aren't optimizing. We always do so
2450 if -fexpensive-optimizations. */
2453 preserve_subexpressions_p (void)
2455 if (flag_expensive_optimizations)
2456 return 1;
2458 if (optimize == 0 || cfun == 0 || cfun->stmt == 0)
2459 return 0;
2461 return 1;
2465 /* Generate RTL to return from the current function, with no value.
2466 (That is, we do not do anything about returning any value.) */
2468 void
2469 expand_null_return (void)
2471 rtx last_insn;
/* Remember where the return sequence starts; expand_null_return_1 places
   the jump after this insn.  */
2473 last_insn = get_last_insn ();
2475 /* If this function was declared to return a value, but we
2476 didn't, clobber the return registers so that they are not
2477 propagated live to the rest of the function. */
2478 clobber_return_register ();
2480 expand_null_return_1 (last_insn);
2483 /* Generate RTL to return directly from the current function.
2484 (That is, we bypass any return value.) */
2486 void
2487 expand_naked_return (void)
2489 rtx last_insn, end_label;
2491 last_insn = get_last_insn ();
2492 end_label = naked_return_label;
2494 clear_pending_stack_adjust ();
2495 do_pending_stack_adjust ();
2496 clear_last_expr ();
/* Lazily create the shared naked-return label the first time through.  */
2498 if (end_label == 0)
2499 end_label = naked_return_label = gen_label_rtx ();
2500 expand_goto_internal (NULL_TREE, end_label, last_insn);
2503 /* Try to guess whether the value of return means error code. */
2504 static enum br_predictor
2505 return_prediction (rtx val)
2507 /* Different heuristics for pointers and scalars. */
2508 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2510 /* NULL is usually not returned. */
2511 if (val == const0_rtx)
2512 return PRED_NULL_RETURN;
2514 else
2516 /* Negative return values are often used to indicate
2517 errors. */
2518 if (GET_CODE (val) == CONST_INT
2519 && INTVAL (val) < 0)
2520 return PRED_NEGATIVE_RETURN;
2521 /* Constant return values are also usually errors,
2522 zero/one often mean booleans so exclude them from the
2523 heuristics. */
2524 if (CONSTANT_P (val)
2525 && (val != const0_rtx && val != const1_rtx))
2526 return PRED_CONST_RETURN;
2528 return PRED_NO_PREDICTION;
2532 /* If the current function returns values in the most significant part
2533 of a register, shift return value VAL appropriately.  The mode of
2534 the function's return type is known not to be BLKmode. */
2536 static rtx
2537 shift_return_value (rtx val)
2539 tree type;
2541 type = TREE_TYPE (DECL_RESULT (current_function_decl));
2542 if (targetm.calls.return_in_msb (type))
2544 rtx target;
2545 HOST_WIDE_INT shift;
2547 target = DECL_RTL (DECL_RESULT (current_function_decl));
/* SHIFT is the number of unused low-order bits when the value is
   left-justified in the return register.  */
2548 shift = (GET_MODE_BITSIZE (GET_MODE (target))
2549 - BITS_PER_UNIT * int_size_in_bytes (type));
2550 if (shift > 0)
2551 val = expand_binop (GET_MODE (target), ashl_optab,
2552 gen_lowpart (GET_MODE (target), val),
2553 GEN_INT (shift), target, 1, OPTAB_WIDEN);
2555 return val;
2559 /* Generate RTL to return from the current function, with value VAL. */
2561 static void
2562 expand_value_return (rtx val)
2564 rtx last_insn;
2565 rtx return_reg;
2566 enum br_predictor pred;
2568 if (flag_guess_branch_prob
2569 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
2571 /* Emit information for branch prediction. */
2572 rtx note;
2574 note = emit_note (NOTE_INSN_PREDICTION);
2576 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2580 last_insn = get_last_insn ();
2581 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2583 /* Copy the value to the return location
2584 unless it's already there. */
2586 if (return_reg != val)
2588 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
/* Promote the value to the mode the target expects for returns.  */
2589 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
2591 int unsignedp = TYPE_UNSIGNED (type);
2592 enum machine_mode old_mode
2593 = DECL_MODE (DECL_RESULT (current_function_decl));
2594 enum machine_mode mode
2595 = promote_mode (type, old_mode, &unsignedp, 1);
2597 if (mode != old_mode)
2598 val = convert_modes (mode, old_mode, val, unsignedp);
/* A PARALLEL return location (value split over registers) needs a
   group load rather than a single move.  */
2600 if (GET_CODE (return_reg) == PARALLEL)
2601 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
2602 else
2603 emit_move_insn (return_reg, val);
2606 expand_null_return_1 (last_insn);
2609 /* Output a return with no value.  If LAST_INSN is nonzero,
2610 pretend that the return takes place after LAST_INSN. */
2612 static void
2613 expand_null_return_1 (rtx last_insn)
/* Prefer the cleanup label when one exists, so pending cleanups
   presumably run before control reaches the actual return.  */
2615 rtx end_label = cleanup_label ? cleanup_label : return_label;
2617 clear_pending_stack_adjust ();
2618 do_pending_stack_adjust ();
2619 clear_last_expr ();
2621 if (end_label == 0)
2622 end_label = return_label = gen_label_rtx ();
2623 expand_goto_internal (NULL_TREE, end_label, last_insn);
2626 /* Generate RTL to evaluate the expression RETVAL and return it
2627 from the current function. */
2629 void
2630 expand_return (tree retval)
2632 /* If there are any cleanups to be performed, then they will
2633 be inserted following LAST_INSN.  It is desirable
2634 that the last_insn, for such purposes, should be the
2635 last insn before computing the return value.  Otherwise, cleanups
2636 which call functions can clobber the return value. */
2637 /* ??? rms: I think that is erroneous, because in C++ it would
2638 run destructors on variables that might be used in the subsequent
2639 computation of the return value. */
2640 rtx last_insn = 0;
2641 rtx result_rtl;
2642 rtx val = 0;
2643 tree retval_rhs;
2645 /* If function wants no value, give it none. */
2646 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2648 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2649 emit_queue ();
2650 expand_null_return ();
2651 return;
2654 if (retval == error_mark_node)
2656 /* Treat this like a return of no value from a function that
2657 returns a value. */
2658 expand_null_return ();
2659 return;
/* Pick out the expression whose value is actually being returned:
   either RETVAL itself or the RHS of a store to the RESULT_DECL.  */
2661 else if (TREE_CODE (retval) == RESULT_DECL)
2662 retval_rhs = retval;
2663 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2664 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2665 retval_rhs = TREE_OPERAND (retval, 1);
2666 else
2667 retval_rhs = retval;
2669 last_insn = get_last_insn ();
2671 /* Distribute return down conditional expr if either of the sides
2672 may involve tail recursion (see test below).  This enhances the number
2673 of tail recursions we see.  Don't do this always since it can produce
2674 sub-optimal code in some cases and we distribute assignments into
2675 conditional expressions when it would help. */
2677 if (optimize && retval_rhs != 0
2678 && frame_offset == 0
2679 && TREE_CODE (retval_rhs) == COND_EXPR
2680 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2681 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2683 rtx label = gen_label_rtx ();
2684 tree expr;
/* Rewrite "return c ? a : b" as "if (!c) goto L; return a; L: return b"
   by recursing on each arm.  */
2686 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2687 start_cleanup_deferral ();
2688 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2689 DECL_RESULT (current_function_decl),
2690 TREE_OPERAND (retval_rhs, 1));
2691 TREE_SIDE_EFFECTS (expr) = 1;
2692 expand_return (expr);
2693 emit_label (label);
2695 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2696 DECL_RESULT (current_function_decl),
2697 TREE_OPERAND (retval_rhs, 2));
2698 TREE_SIDE_EFFECTS (expr) = 1;
2699 expand_return (expr);
2700 end_cleanup_deferral ();
2701 return;
2704 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
2706 /* If the result is an aggregate that is being returned in one (or more)
2707 registers, load the registers here.  The compiler currently can't handle
2708 copying a BLKmode value into registers.  We could put this code in a
2709 more general area (for use by everyone instead of just function
2710 call/return), but until this feature is generally usable it is kept here
2711 (and in expand_call).  The value must go into a pseudo in case there
2712 are cleanups that will clobber the real return register. */
2714 if (retval_rhs != 0
2715 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2716 && GET_CODE (result_rtl) == REG)
2718 int i;
2719 unsigned HOST_WIDE_INT bitpos, xbitpos;
2720 unsigned HOST_WIDE_INT padding_correction = 0;
2721 unsigned HOST_WIDE_INT bytes
2722 = int_size_in_bytes (TREE_TYPE (retval_rhs));
2723 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2724 unsigned int bitsize
2725 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
2726 rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
2727 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2728 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2729 enum machine_mode tmpmode, result_reg_mode;
/* A zero-sized aggregate: nothing to copy; return no value.  */
2731 if (bytes == 0)
2733 expand_null_return ();
2734 return;
2737 /* If the structure doesn't take up a whole number of words, see
2738 whether the register value should be padded on the left or on
2739 the right.  Set PADDING_CORRECTION to the number of padding
2740 bits needed on the left side.
2742 In most ABIs, the structure will be returned at the least end of
2743 the register, which translates to right padding on little-endian
2744 targets and left padding on big-endian targets.  The opposite
2745 holds if the structure is returned at the most significant
2746 end of the register. */
2747 if (bytes % UNITS_PER_WORD != 0
2748 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
2749 ? !BYTES_BIG_ENDIAN
2750 : BYTES_BIG_ENDIAN))
2751 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2752 * BITS_PER_UNIT));
2754 /* Copy the structure BITSIZE bits at a time. */
2755 for (bitpos = 0, xbitpos = padding_correction;
2756 bitpos < bytes * BITS_PER_UNIT;
2757 bitpos += bitsize, xbitpos += bitsize)
2759 /* We need a new destination pseudo each time xbitpos is
2760 on a word boundary and when xbitpos == padding_correction
2761 (the first time through). */
2762 if (xbitpos % BITS_PER_WORD == 0
2763 || xbitpos == padding_correction)
2765 /* Generate an appropriate register. */
2766 dst = gen_reg_rtx (word_mode);
2767 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2769 /* Clear the destination before we move anything into it. */
2770 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
2773 /* We need a new source operand each time bitpos is on a word
2774 boundary. */
2775 if (bitpos % BITS_PER_WORD == 0)
2776 src = operand_subword_force (result_val,
2777 bitpos / BITS_PER_WORD,
2778 BLKmode);
2780 /* Use bitpos for the source extraction (left justified) and
2781 xbitpos for the destination store (right justified). */
2782 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2783 extract_bit_field (src, bitsize,
2784 bitpos % BITS_PER_WORD, 1,
2785 NULL_RTX, word_mode, word_mode,
2786 BITS_PER_WORD),
2787 BITS_PER_WORD);
2790 tmpmode = GET_MODE (result_rtl);
2791 if (tmpmode == BLKmode)
2793 /* Find the smallest integer mode large enough to hold the
2794 entire structure and use that mode instead of BLKmode
2795 on the USE insn for the return register. */
2796 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2797 tmpmode != VOIDmode;
2798 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2799 /* Have we found a large enough mode? */
2800 if (GET_MODE_SIZE (tmpmode) >= bytes)
2801 break;
2803 /* No suitable mode found. */
2804 if (tmpmode == VOIDmode)
2805 abort ();
2807 PUT_MODE (result_rtl, tmpmode);
2810 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2811 result_reg_mode = word_mode;
2812 else
2813 result_reg_mode = tmpmode;
2814 result_reg = gen_reg_rtx (result_reg_mode);
2816 emit_queue ();
2817 for (i = 0; i < n_regs; i++)
2818 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2819 result_pseudos[i]);
2821 if (tmpmode != result_reg_mode)
2822 result_reg = gen_lowpart (tmpmode, result_reg);
2824 expand_value_return (result_reg);
/* Non-BLKmode value returned in a register or register group: compute
   it into a pseudo first, so cleanups cannot clobber the hard return
   register (see comment above).  */
2826 else if (retval_rhs != 0
2827 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
2828 && (GET_CODE (result_rtl) == REG
2829 || (GET_CODE (result_rtl) == PARALLEL)))
2831 /* Calculate the return value into a temporary (usually a pseudo
2832 reg). */
2833 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
2834 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
2836 val = assign_temp (nt, 0, 0, 1);
2837 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2838 val = force_not_mem (val);
2839 emit_queue ();
2840 /* Return the calculated value, doing cleanups first. */
2841 expand_value_return (shift_return_value (val));
2843 else
2845 /* No cleanups or no hard reg used;
2846 calculate value into hard return reg. */
2847 expand_expr (retval, const0_rtx, VOIDmode, 0);
2848 emit_queue ();
2849 expand_value_return (result_rtl);
2853 /* Attempt to optimize a potential tail recursion call into a goto.
2854 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
2855 where to place the jump to the tail recursion label.
2857 Return TRUE if the call was optimized into a goto. */
2860 optimize_tail_recursion (tree arguments, rtx last_insn)
2862 /* Finish checking validity, and if valid emit code to set the
2863 argument variables for the new call. */
2864 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
/* Emit the tail-recursion target label once, just after the function's
   reentry point, and reuse it for every later tail call.  */
2866 if (tail_recursion_label == 0)
2868 tail_recursion_label = gen_label_rtx ();
2869 emit_label_after (tail_recursion_label,
2870 tail_recursion_reentry);
2872 emit_queue ();
2873 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
/* Nothing can follow the jump; mark the spot unreachable.  */
2874 emit_barrier ();
2875 return 1;
2877 return 0;
2880 /* Emit code to alter this function's formal parms for a tail-recursive call.
2881 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2882 FORMALS is the chain of decls of formals.
2883 Return 1 if this can be done;
2884 otherwise return 0 and do not emit any code. */
2886 static int
2887 tail_recursion_args (tree actuals, tree formals)
2889 tree a = actuals, f = formals;
2890 int i;
2891 rtx *argvec;
2893 /* Check that number and types of actuals are compatible
2894 with the formals.  This is not always true in valid C code.
2895 Also check that no formal needs to be addressable
2896 and that all formals are scalars. */
2898 /* Also count the args. */
2900 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2902 if (!lang_hooks.types_compatible_p (TREE_TYPE (TREE_VALUE (a)),
2903 TREE_TYPE (f)))
2904 return 0;
2905 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2906 return 0;
/* Both chains must have run out together, else the arity differs.  */
2908 if (a != 0 || f != 0)
2909 return 0;
2911 /* Compute all the actuals. */
2913 argvec = alloca (i * sizeof (rtx));
2915 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2916 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2918 /* Find which actual values refer to current values of previous formals.
2919 Copy each of them now, before any formal is changed. */
2921 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2923 int copy = 0;
2924 int j;
2925 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2926 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2928 copy = 1;
2929 break;
2931 if (copy)
2932 argvec[i] = copy_to_reg (argvec[i]);
2935 /* Store the values of the actuals into the formals. */
2937 for (f = formals, a = actuals, i = 0; f;
2938 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2940 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2941 emit_move_insn (DECL_RTL (f), argvec[i]);
2942 else
/* Modes differ: convert, going through a temporary in the formal's
   declared mode when the formal's RTL mode differs from it.  */
2944 rtx tmp = argvec[i];
2945 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_VALUE (a)));
2946 promote_mode(TREE_TYPE (TREE_VALUE (a)), GET_MODE (tmp),
2947 &unsignedp, 0);
2948 if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
2950 tmp = gen_reg_rtx (DECL_MODE (f));
2951 convert_move (tmp, argvec[i], unsignedp);
2953 convert_move (DECL_RTL (f), tmp, unsignedp);
2957 free_temp_slots ();
2958 return 1;
2961 /* Generate the RTL code for entering a binding contour.
2962 The variables are declared one by one, by calls to `expand_decl'.
2964 FLAGS is a bitwise or of the following flags:
2966 1 - Nonzero if this construct should be visible to
2967 `exit_something'.
2969 2 - Nonzero if this contour does not require a
2970 NOTE_INSN_BLOCK_BEG note.  Virtually all calls from
2971 language-independent code should set this flag because they
2972 will not create corresponding BLOCK nodes.  (There should be
2973 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
2974 and BLOCKs.)  If this flag is set, MARK_ENDS should be zero
2975 when expand_end_bindings is called.
2977 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
2978 optionally be supplied.  If so, it becomes the NOTE_BLOCK for the
2979 note. */
2981 void
2982 expand_start_bindings_and_block (int flags, tree block)
2984 struct nesting *thisblock = ALLOC_NESTING ();
2985 rtx note;
2986 int exit_flag = ((flags & 1) != 0);
2987 int block_flag = ((flags & 2) == 0);
2989 /* If a BLOCK is supplied, then the caller should be requesting a
2990 NOTE_INSN_BLOCK_BEG note. */
2991 if (!block_flag && block)
2992 abort ();
2994 /* Create a note to mark the beginning of the block. */
2995 if (block_flag && !cfun->dont_emit_block_notes)
2997 note = emit_note (NOTE_INSN_BLOCK_BEG);
2998 NOTE_BLOCK (note) = block;
3000 else
3001 note = emit_note (NOTE_INSN_DELETED);
3003 /* Make an entry on block_stack for the block we are entering. */
3005 thisblock->desc = BLOCK_NESTING;
3006 thisblock->next = block_stack;
3007 thisblock->all = nesting_stack;
3008 thisblock->depth = ++nesting_depth;
3009 thisblock->data.block.stack_level = 0;
3010 thisblock->data.block.cleanups = 0;
3011 thisblock->data.block.exception_region = 0;
3012 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3014 thisblock->data.block.conditional_code = 0;
3015 thisblock->data.block.last_unconditional_cleanup = note;
3016 /* When we insert instructions after the last unconditional cleanup,
3017 we don't adjust last_insn.  That means that a later add_insn will
3018 clobber the instructions we've just added.  The easiest way to
3019 fix this is to just insert another instruction here, so that the
3020 instructions inserted after the last unconditional cleanup are
3021 never the last instruction. */
3022 emit_note (NOTE_INSN_DELETED);
/* Chain the enclosing block's cleanups onto this block, so exits through
   this contour can run them all.  */
3024 if (block_stack
3025 && !(block_stack->data.block.cleanups == NULL_TREE
3026 && block_stack->data.block.outer_cleanups == NULL_TREE))
3027 thisblock->data.block.outer_cleanups
3028 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3029 block_stack->data.block.outer_cleanups)
3030 else
3031 thisblock->data.block.outer_cleanups = 0;
3032 thisblock->data.block.label_chain = 0;
3033 thisblock->data.block.innermost_stack_block = stack_block_stack;
3034 thisblock->data.block.first_insn = note;
3035 thisblock->data.block.block_start_count = ++current_block_start_count;
3036 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3037 block_stack = thisblock;
3038 nesting_stack = thisblock;
3040 /* Make a new level for allocating stack slots. */
3041 push_temp_slots ();
3044 /* Specify the scope of temporaries created by TARGET_EXPRs.  Similar
3045 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3046 expand_expr are made.  After we end the region, we know that all
3047 space for all temporaries that were created by TARGET_EXPRs will be
3048 destroyed and their space freed for reuse. */
3050 void
3051 expand_start_target_temps (void)
3053 /* This is so that even if the result is preserved, the space
3054 allocated will be freed, as we know that it is no longer in use. */
3055 push_temp_slots ();
3057 /* Start a new binding layer that will keep track of all cleanup
3058 actions to be performed. */
3059 expand_start_bindings (2);
/* Temporaries at or above this level belong to the new region.  */
3061 target_temp_slot_level = temp_slot_level;
3064 void
3065 expand_end_target_temps (void)
3067 expand_end_bindings (NULL_TREE, 0, 0);
3069 /* This is so that even if the result is preserved, the space
3070 allocated will be freed, as we know that it is no longer in use. */
3071 pop_temp_slots ();
3074 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
3075 in question represents the outermost pair of curly braces (i.e. the "body
3076 block") of a function or method.
3078 For any BLOCK node representing a "body block" of a function or method, the
3079 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3080 represents the outermost (function) scope for the function or method (i.e.
3081 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3082 *that* node in turn will point to the relevant FUNCTION_DECL node. */
3085 is_body_block (tree stmt)
3087 if (lang_hooks.no_body_blocks)
3088 return 0;
3090 if (TREE_CODE (stmt) == BLOCK)
3092 tree parent = BLOCK_SUPERCONTEXT (stmt);
3094 if (parent && TREE_CODE (parent) == BLOCK)
3096 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3098 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3099 return 1;
3103 return 0;
3106 /* True if we are currently emitting insns in an area of output code
3107 that is controlled by a conditional expression. This is used by
3108 the cleanup handling code to generate conditional cleanup actions. */
3111 conditional_context (void)
3113 return block_stack && block_stack->data.block.conditional_code;
3116 /* Return an opaque pointer to the current nesting level, so frontend code
3117 can check its own sanity. */
3119 struct nesting *
3120 current_nesting_level (void)
3122 return cfun ? block_stack : 0;
3125 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3126 handler. */
/* Emit the receiver code for a nonlocal goto: re-establish the frame
   pointer (and, where distinct and fixed, the argument pointer) before
   control continues in this function, and fence the code against
   scheduler reordering.  */
3127 static void
3128 expand_nl_goto_receiver (void)
3130 /* Clobber the FP when we get here, so we have to make sure it's
3131 marked as used by this function. */
3132 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
3134 /* Mark the static chain as clobbered here so life information
3135 doesn't get messed up for it. */
3136 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
3138 #ifdef HAVE_nonlocal_goto
3139 if (! HAVE_nonlocal_goto)
3140 #endif
3141 /* First adjust our frame pointer to its actual value. It was
3142 previously set to the start of the virtual area corresponding to
3143 the stacked variables when we branched here and now needs to be
3144 adjusted to the actual hardware fp value.
3146 Assignments to virtual registers are converted by
3147 instantiate_virtual_regs into the corresponding assignment
3148 to the underlying register (fp in this case) that makes
3149 the original assignment true.
3150 So the following insn will actually be
3151 decrementing fp by STARTING_FRAME_OFFSET. */
3152 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx)
3154 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3155 if (fixed_regs[ARG_POINTER_REGNUM])
3157 #ifdef ELIMINABLE_REGS
3158 /* If the argument pointer can be eliminated in favor of the
3159 frame pointer, we don't need to restore it. We assume here
3160 that if such an elimination is present, it can always be used.
3161 This is the case on all known machines; if we don't make this
3162 assumption, we do unnecessary saving on many machines. */
3163 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3164 size_t i;
3166 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3167 if (elim_regs[i].from == ARG_POINTER_REGNUM
3168 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3169 break;
3171 if (i == ARRAY_SIZE (elim_regs))
3172 #endif
3174 /* Now restore our arg pointer from the address at which it
3175 was saved in our stack frame. */
3176 emit_move_insn (virtual_incoming_args_rtx,
3177 copy_to_reg (get_arg_pointer_save_area (cfun)));
3180 #endif
3182 #ifdef HAVE_nonlocal_goto_receiver
3183 if (HAVE_nonlocal_goto_receiver)
3184 emit_insn (gen_nonlocal_goto_receiver ());
3185 #endif
3187 /* @@@ This is a kludge. Not all machine descriptions define a blockage
3188 insn, but we must not allow the code we just generated to be reordered
3189 by scheduling. Specifically, the update of the frame pointer must
3190 happen immediately, not later. So emit an ASM_INPUT to act as blockage
3191 insn. */
3192 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
3195 /* Warn about any unused VARS (which may contain nodes other than
3196 VAR_DECLs, but such nodes are ignored). The nodes are connected
3197 via the TREE_CHAIN field. */
3199 void
3200 warn_about_unused_variables (tree vars)
3202 tree decl;
3204 if (warn_unused_variable)
3205 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3206 if (TREE_CODE (decl) == VAR_DECL
3207 && ! TREE_USED (decl)
3208 && ! DECL_IN_SYSTEM_HEADER (decl)
3209 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3210 warning ("%Junused variable '%D'", decl, decl);
3213 /* Generate RTL code to terminate a binding contour.
3215 VARS is the chain of VAR_DECL nodes for the variables bound in this
3216 contour. There may actually be other nodes in this chain, but any
3217 nodes other than VAR_DECLS are ignored.
3219 MARK_ENDS is nonzero if we should put a note at the beginning
3220 and end of this binding contour.
3222 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
3223 zero if we can jump into this contour only if it does not have a saved
3224 stack level, and negative if we are not to check for invalid use of
3225 labels (because the front end does that). */
3227 void
3228 expand_end_bindings (tree vars, int mark_ends, int dont_jump_in)
3230 struct nesting *thisblock = block_stack;
3232 /* If any of the variables in this scope were not used, warn the
3233 user. */
3234 warn_about_unused_variables (vars);
3236 if (thisblock->exit_label)
3238 do_pending_stack_adjust ();
3239 emit_label (thisblock->exit_label);
3242 /* Don't allow jumping into a block that has a stack level.
3243 Cleanups are allowed, though. */
3244 if (dont_jump_in > 0
3245 || (dont_jump_in == 0 && thisblock->data.block.stack_level != 0))
3247 struct label_chain *chain;
3249 /* Any labels in this block are no longer valid to go to.
3250 Mark them to cause an error message. */
3251 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3253 DECL_TOO_LATE (chain->label) = 1;
3254 /* If any goto without a fixup came to this label,
3255 that must be an error, because gotos without fixups
3256 come from outside all saved stack-levels. */
3257 if (TREE_ADDRESSABLE (chain->label))
3258 error ("%Jlabel '%D' used before containing binding contour",
3259 chain->label, chain->label);
3263 /* Restore stack level in effect before the block
3264 (only if variable-size objects allocated). */
3265 /* Perform any cleanups associated with the block. */
3267 if (thisblock->data.block.stack_level != 0
3268 || thisblock->data.block.cleanups != 0)
3270 int reachable;
3271 rtx insn;
3273 /* Don't let cleanups affect ({...}) constructs. */
3274 int old_expr_stmts_for_value = expr_stmts_for_value;
3275 rtx old_last_expr_value = last_expr_value;
3276 rtx old_last_expr_alt_rtl = last_expr_alt_rtl;
3277 tree old_last_expr_type = last_expr_type;
3278 expr_stmts_for_value = 0;
3280 /* Only clean up here if this point can actually be reached. */
/* The block end is unreachable iff the last real (non-note) insn
   emitted so far is a BARRIER.  */
3281 insn = get_last_insn ();
3282 if (GET_CODE (insn) == NOTE)
3283 insn = prev_nonnote_insn (insn);
3284 reachable = (! insn || GET_CODE (insn) != BARRIER);
3286 /* Do the cleanups. */
3287 expand_cleanups (thisblock->data.block.cleanups, 0, reachable);
3288 if (reachable)
3289 do_pending_stack_adjust ();
3291 expr_stmts_for_value = old_expr_stmts_for_value;
3292 last_expr_value = old_last_expr_value;
3293 last_expr_alt_rtl = old_last_expr_alt_rtl;
3294 last_expr_type = old_last_expr_type;
3296 /* Restore the stack level. */
3298 if (reachable && thisblock->data.block.stack_level != 0)
3300 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3301 thisblock->data.block.stack_level, NULL_RTX);
3302 if (cfun->nonlocal_goto_save_area)
3303 update_nonlocal_goto_save_area ();
3306 /* Any gotos out of this block must also do these things.
3307 Also report any gotos with fixups that came to labels in this
3308 level. */
3309 fixup_gotos (thisblock,
3310 thisblock->data.block.stack_level,
3311 thisblock->data.block.cleanups,
3312 thisblock->data.block.first_insn,
3313 dont_jump_in);
3316 /* Mark the beginning and end of the scope if requested.
3317 We do this now, after running cleanups on the variables
3318 just going out of scope, so they are in scope for their cleanups. */
3320 if (mark_ends && !cfun->dont_emit_block_notes)
3322 rtx note = emit_note (NOTE_INSN_BLOCK_END);
3323 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3325 else
3326 /* Get rid of the beginning-mark if we don't make an end-mark. */
3327 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3329 /* Restore the temporary level of TARGET_EXPRs. */
3330 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3332 /* Restore block_stack level for containing block. */
3334 stack_block_stack = thisblock->data.block.innermost_stack_block;
3335 POPSTACK (block_stack);
3337 /* Pop the stack slot nesting and free any slots at this level. */
3338 pop_temp_slots ();
3341 /* Generate code to save the stack pointer at the start of the current block
3342 and set up to restore it on exit. */
3344 void
3345 save_stack_pointer (void)
3347 struct nesting *thisblock = block_stack;
3349 if (thisblock->data.block.stack_level == 0)
3351 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3352 &thisblock->data.block.stack_level,
3353 thisblock->data.block.first_insn);
3354 stack_block_stack = thisblock;
3358 /* Generate RTL for the automatic variable declaration DECL.
3359 (Other kinds of declarations are simply ignored if seen here.) */
3361 void
3362 expand_decl (tree decl)
3364 tree type;
3366 type = TREE_TYPE (decl);
3368 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3369 type in case this node is used in a reference. */
3370 if (TREE_CODE (decl) == CONST_DECL)
3372 DECL_MODE (decl) = TYPE_MODE (type);
3373 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3374 DECL_SIZE (decl) = TYPE_SIZE (type);
3375 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3376 return;
3379 /* Otherwise, only automatic variables need any expansion done. Static and
3380 external variables, and external functions, will be handled by
3381 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3382 nothing. PARM_DECLs are handled in `assign_parms'. */
3383 if (TREE_CODE (decl) != VAR_DECL)
3384 return;
3386 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3387 return;
3389 /* Create the RTL representation for the variable.
   One of four cases applies below: erroneous type, incomplete type,
   register candidate, or stack (fixed- or variable-sized) home.  */
3391 if (type == error_mark_node)
3392 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3394 else if (DECL_SIZE (decl) == 0)
3395 /* Variable with incomplete type. */
3397 rtx x;
3398 if (DECL_INITIAL (decl) == 0)
3399 /* Error message was already done; now avoid a crash. */
3400 x = gen_rtx_MEM (BLKmode, const0_rtx);
3401 else
3402 /* An initializer is going to decide the size of this array.
3403 Until we know the size, represent its address with a reg. */
3404 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3406 set_mem_attributes (x, decl, 1);
3407 SET_DECL_RTL (decl, x);
3409 else if (DECL_MODE (decl) != BLKmode
3410 /* If -ffloat-store, don't put explicit float vars
3411 into regs. */
3412 && !(flag_float_store
3413 && TREE_CODE (type) == REAL_TYPE)
3414 && ! TREE_THIS_VOLATILE (decl)
3415 && ! DECL_NONLOCAL (decl)
3416 && (DECL_REGISTER (decl) || DECL_ARTIFICIAL (decl) || optimize))
3418 /* Automatic variable that can go in a register. */
3419 int unsignedp = TYPE_UNSIGNED (type);
3420 enum machine_mode reg_mode
3421 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3423 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3425 if (!DECL_ARTIFICIAL (decl))
3426 mark_user_reg (DECL_RTL (decl));
3428 if (POINTER_TYPE_P (type))
3429 mark_reg_pointer (DECL_RTL (decl),
3430 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3432 maybe_set_unchanging (DECL_RTL (decl), decl);
3434 /* If something wants our address, try to use ADDRESSOF. */
3435 if (TREE_ADDRESSABLE (decl))
3436 put_var_into_stack (decl, /*rescan=*/false);
3439 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3440 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3441 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3442 STACK_CHECK_MAX_VAR_SIZE)))
3444 /* Variable of fixed size that goes on the stack. */
3445 rtx oldaddr = 0;
3446 rtx addr;
3447 rtx x;
3449 /* If we previously made RTL for this decl, it must be an array
3450 whose size was determined by the initializer.
3451 The old address was a register; set that register now
3452 to the proper address. */
3453 if (DECL_RTL_SET_P (decl))
3455 if (GET_CODE (DECL_RTL (decl)) != MEM
3456 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3457 abort ();
3458 oldaddr = XEXP (DECL_RTL (decl), 0);
3461 /* Set alignment we actually gave this decl. */
3462 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3463 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3464 DECL_USER_ALIGN (decl) = 0;
3466 x = assign_temp (decl, 1, 1, 1);
3467 set_mem_attributes (x, decl, 1);
3468 SET_DECL_RTL (decl, x);
3470 if (oldaddr)
3472 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3473 if (addr != oldaddr)
3474 emit_move_insn (oldaddr, addr);
3477 else
3478 /* Dynamic-size object: must push space on the stack. */
3480 rtx address, size, x;
3482 /* Record the stack pointer on entry to block, if we have
3483 not already done so. */
3484 do_pending_stack_adjust ();
3485 save_stack_pointer ();
3487 /* Compute the variable's size, in bytes. This will expand any
3488 needed SAVE_EXPRs for the first time. */
3489 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3490 free_temp_slots ();
3492 /* Allocate space on the stack for the variable. Note that
3493 DECL_ALIGN says how the variable is to be aligned and we
3494 cannot use it to conclude anything about the alignment of
3495 the size. */
3496 address = allocate_dynamic_stack_space (size, NULL_RTX,
3497 TYPE_ALIGN (TREE_TYPE (decl)));
3499 /* Reference the variable indirect through that rtx. */
3500 x = gen_rtx_MEM (DECL_MODE (decl), address);
3501 set_mem_attributes (x, decl, 1);
3502 SET_DECL_RTL (decl, x);
3505 /* Indicate the alignment we actually gave this variable. */
3506 #ifdef STACK_BOUNDARY
3507 DECL_ALIGN (decl) = STACK_BOUNDARY;
3508 #else
3509 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3510 #endif
3511 DECL_USER_ALIGN (decl) = 0;
3515 /* Emit code to allocate T_SIZE bytes of dynamic stack space for ALLOC. */
/* ALLOC must be an ADDR_EXPR whose operand is a VAR_DECL (enforced by
   the aborts below); the allocated space is stored through the
   variable's existing address register.
   NOTE(review): assumes DECL_RTL (var) is a MEM whose address operand
   can receive the new block address — confirm against callers.  */
3516 void
3517 expand_stack_alloc (tree alloc, tree t_size)
3519 rtx address, dest, size;
3520 tree var, type;
3522 if (TREE_CODE (alloc) != ADDR_EXPR)
3523 abort ();
3524 var = TREE_OPERAND (alloc, 0);
3525 if (TREE_CODE (var) != VAR_DECL)
3526 abort ();
3528 type = TREE_TYPE (var);
3530 /* In function-at-a-time mode, variable_size doesn't expand this,
3531 so do it now. */
3532 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3533 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3534 const0_rtx, VOIDmode, 0);
3536 /* Compute the variable's size, in bytes. */
3537 size = expand_expr (t_size, NULL_RTX, VOIDmode, 0);
3538 free_temp_slots ();
3540 /* Allocate space on the stack for the variable. */
3541 address = XEXP (DECL_RTL (var), 0);
3542 dest = allocate_dynamic_stack_space (size, address, TYPE_ALIGN (type));
3543 if (dest != address)
3544 emit_move_insn (address, dest);
3546 /* Indicate the alignment we actually gave this variable. */
3547 #ifdef STACK_BOUNDARY
3548 DECL_ALIGN (var) = STACK_BOUNDARY;
3549 #else
3550 DECL_ALIGN (var) = BIGGEST_ALIGNMENT;
3551 #endif
3552 DECL_USER_ALIGN (var) = 0;
3555 /* Emit code to save the current value of stack. */
3557 expand_stack_save (void)
3559 rtx ret = NULL_RTX;
3561 do_pending_stack_adjust ();
3562 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
3563 return ret;
3566 /* Emit code to restore the current value of stack. */
3567 void
3568 expand_stack_restore (tree var)
3570 rtx sa = DECL_RTL (var);
3572 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
3575 /* Emit code to perform the initialization of a declaration DECL. */
3577 void
3578 expand_decl_init (tree decl)
3580 int was_used = TREE_USED (decl);
3582 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
3583 for static decls. */
3584 if (TREE_CODE (decl) == CONST_DECL
3585 || TREE_STATIC (decl))
3586 return;
3588 /* Compute and store the initial value now. */
3590 push_temp_slots ();
3592 if (DECL_INITIAL (decl) == error_mark_node)
/* The initializer was erroneous: store a zero of the decl's type
   (for scalar-like types) so later uses don't read garbage.  */
3594 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3596 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3597 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3598 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3600 emit_queue ();
3602 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3604 emit_line_note (DECL_SOURCE_LOCATION (decl));
3605 expand_assignment (decl, DECL_INITIAL (decl), 0);
3606 emit_queue ();
3609 /* Don't let the initialization count as "using" the variable. */
3610 TREE_USED (decl) = was_used;
3612 /* Free any temporaries we made while initializing the decl. */
3613 preserve_temp_slots (NULL_RTX);
3614 free_temp_slots ();
3615 pop_temp_slots ();
3618 /* CLEANUP is an expression to be executed at exit from this binding contour;
3619 for example, in C++, it might call the destructor for this variable.
3621 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3622 CLEANUP multiple times, and have the correct semantics. This
3623 happens in exception handling, for gotos, returns, breaks that
3624 leave the current scope.
3626 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3627 that is not associated with any particular variable. */
/* Returns 0 when called outside any function or binding contour
   (caller should report the error); otherwise records CLEANUP (if
   nonzero) on the innermost block and returns 1.  */
3630 expand_decl_cleanup (tree decl, tree cleanup)
3632 struct nesting *thisblock;
3634 /* Error if we are not in any block. */
3635 if (cfun == 0 || block_stack == 0)
3636 return 0;
3638 thisblock = block_stack;
3640 /* Record the cleanup if there is one. */
3642 if (cleanup != 0)
3644 tree t;
3645 rtx seq;
3646 tree *cleanups = &thisblock->data.block.cleanups;
3647 int cond_context = conditional_context ();
3649 if (cond_context)
/* In conditional code, guard the cleanup with a flag register that
   is zero on entry to the block and set to one only when control
   actually reaches this point.  */
3651 rtx flag = gen_reg_rtx (word_mode);
3652 rtx set_flag_0;
3653 tree cond;
3655 start_sequence ();
3656 emit_move_insn (flag, const0_rtx);
3657 set_flag_0 = get_insns ();
3658 end_sequence ();
3660 thisblock->data.block.last_unconditional_cleanup
3661 = emit_insn_after (set_flag_0,
3662 thisblock->data.block.last_unconditional_cleanup);
3664 emit_move_insn (flag, const1_rtx);
3666 cond = build_decl (VAR_DECL, NULL_TREE,
3667 lang_hooks.types.type_for_mode (word_mode, 1));
3668 SET_DECL_RTL (cond, flag);
3670 /* Conditionalize the cleanup. */
3671 cleanup = build (COND_EXPR, void_type_node,
3672 lang_hooks.truthvalue_conversion (cond),
3673 cleanup, integer_zero_node);
3674 cleanup = fold (cleanup);
3676 cleanups = &thisblock->data.block.cleanups;
/* Wrap in UNSAVE_EXPR so the cleanup can be expanded more than once
   (goto/return/exception paths) with correct semantics.  */
3679 cleanup = unsave_expr (cleanup);
3681 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
3683 if (! cond_context)
3684 /* If this block has a cleanup, it belongs in stack_block_stack. */
3685 stack_block_stack = thisblock;
3687 if (cond_context)
3689 start_sequence ();
3692 if (! using_eh_for_cleanups_p)
3693 TREE_ADDRESSABLE (t) = 1;
3694 else
3695 expand_eh_region_start ();
3697 if (cond_context)
3699 seq = get_insns ();
3700 end_sequence ();
3701 if (seq)
3702 thisblock->data.block.last_unconditional_cleanup
3703 = emit_insn_after (seq,
3704 thisblock->data.block.last_unconditional_cleanup);
3706 else
3708 thisblock->data.block.last_unconditional_cleanup
3709 = get_last_insn ();
3710 /* When we insert instructions after the last unconditional cleanup,
3711 we don't adjust last_insn. That means that a later add_insn will
3712 clobber the instructions we've just added. The easiest way to
3713 fix this is to just insert another instruction here, so that the
3714 instructions inserted after the last unconditional cleanup are
3715 never the last instruction. */
3716 emit_note (NOTE_INSN_DELETED);
3719 return 1;
3722 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
3723 is thrown. */
3726 expand_decl_cleanup_eh (tree decl, tree cleanup, int eh_only)
3728 int ret = expand_decl_cleanup (decl, cleanup);
3729 if (cleanup && ret)
3731 tree node = block_stack->data.block.cleanups;
3732 CLEANUP_EH_ONLY (node) = eh_only;
3734 return ret;
3737 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3738 DECL_ELTS is the list of elements that belong to DECL's type.
3739 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3741 void
3742 expand_anon_union_decl (tree decl, tree cleanup, tree decl_elts)
3744 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
3745 rtx x;
3746 tree t;
3748 /* If any of the elements are addressable, so is the entire union. */
3749 for (t = decl_elts; t; t = TREE_CHAIN (t))
3750 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
3752 TREE_ADDRESSABLE (decl) = 1;
3753 break;
3756 expand_decl (decl);
3757 expand_decl_cleanup (decl, cleanup);
3758 x = DECL_RTL (decl);
3760 /* Go through the elements, assigning RTL to each. */
3761 for (t = decl_elts; t; t = TREE_CHAIN (t))
3763 tree decl_elt = TREE_VALUE (t);
3764 tree cleanup_elt = TREE_PURPOSE (t);
3765 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3767 /* If any of the elements are used, so is the entire
3768 union. */
3769 if (TREE_USED (decl_elt))
3770 TREE_USED (decl) = 1;
3772 /* Propagate the union's alignment to the elements. */
3773 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3774 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
3776 /* If the element has BLKmode and the union doesn't, the union is
3777 aligned such that the element doesn't need to have BLKmode, so
3778 change the element's mode to the appropriate one for its size. */
3779 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3780 DECL_MODE (decl_elt) = mode
3781 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
3783 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3784 instead create a new MEM rtx with the proper mode. */
3785 if (GET_CODE (x) == MEM)
3787 if (mode == GET_MODE (x))
3788 SET_DECL_RTL (decl_elt, x)
3789 else
3790 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
3792 else if (GET_CODE (x) == REG)
3794 if (mode == GET_MODE (x))
3795 SET_DECL_RTL (decl_elt, x);
3796 else
3797 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
3799 else
3800 abort ();
3802 /* Record the cleanup if there is one. */
3804 if (cleanup != 0)
3805 thisblock->data.block.cleanups
3806 = tree_cons (decl_elt, cleanup_elt,
3807 thisblock->data.block.cleanups);
3811 /* Expand a list of cleanups LIST.
3812 Elements may be expressions or may be nested lists.
3814 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
3815 goto and handle protection regions specially in that case.
3817 If REACHABLE, we emit code, otherwise just inform the exception handling
3818 code about this finalization. */
3820 static void
3821 expand_cleanups (tree list, int in_fixup, int reachable)
3823 tree tail;
3824 for (tail = list; tail; tail = TREE_CHAIN (tail))
3825 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
/* Nested lists are expanded recursively.  */
3826 expand_cleanups (TREE_VALUE (tail), in_fixup, reachable);
3827 else
3829 if (! in_fixup && using_eh_for_cleanups_p)
3830 expand_eh_region_end_cleanup (TREE_VALUE (tail));
3832 if (reachable && !CLEANUP_EH_ONLY (tail))
3834 /* Cleanups may be run multiple times. For example,
3835 when exiting a binding contour, we expand the
3836 cleanups associated with that contour. When a goto
3837 within that binding contour has a target outside that
3838 contour, it will expand all cleanups from its scope to
3839 the target. Though the cleanups are expanded multiple
3840 times, the control paths are non-overlapping so the
3841 cleanups will not be executed twice. */
3843 /* We may need to protect from outer cleanups. */
3844 if (in_fixup && using_eh_for_cleanups_p)
3846 expand_eh_region_start ();
3848 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3850 expand_eh_region_end_fixup (TREE_VALUE (tail));
3852 else
3853 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3855 free_temp_slots ();
3860 /* Mark when the context we are emitting RTL for as a conditional
3861 context, so that any cleanup actions we register with
3862 expand_decl_init will be properly conditionalized when those
3863 cleanup actions are later performed. Must be called before any
3864 expression (tree) is expanded that is within a conditional context. */
3866 void
3867 start_cleanup_deferral (void)
3869 /* block_stack can be NULL if we are inside the parameter list. It is
3870 OK to do nothing, because cleanups aren't possible here. */
3871 if (block_stack)
3872 ++block_stack->data.block.conditional_code;
3875 /* Mark the end of a conditional region of code. Because cleanup
3876 deferrals may be nested, we may still be in a conditional region
3877 after we end the currently deferred cleanups, only after we end all
3878 deferred cleanups, are we back in unconditional code. */
3880 void
3881 end_cleanup_deferral (void)
3883 /* block_stack can be NULL if we are inside the parameter list. It is
3884 OK to do nothing, because cleanups aren't possible here. */
3885 if (block_stack)
3886 --block_stack->data.block.conditional_code;
3889 tree
3890 last_cleanup_this_contour (void)
3892 if (block_stack == 0)
3893 return 0;
3895 return block_stack->data.block.cleanups;
3899 /* Return nonzero if any containing block has a stack level or
3900 cleanups. */
3903 containing_blocks_have_cleanups_or_stack_level (void)
3905 struct nesting *block;
3907 for (block = block_stack; block; block = block->next)
3908 if (block->data.block.stack_level != 0
3909 || block->data.block.cleanups != 0)
3910 return 1;
3912 return 0;
3915 /* Return 1 if there are any pending cleanups at this point.
3916 Check the current contour as well as contours that enclose
3917 the current contour. */
3920 any_pending_cleanups (void)
3922 struct nesting *block;
3924 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
3925 return 0;
3927 if (block_stack->data.block.cleanups != NULL)
3928 return 1;
3930 if (block_stack->data.block.outer_cleanups == 0)
3931 return 0;
3933 for (block = block_stack->next; block; block = block->next)
3934 if (block->data.block.cleanups != 0)
3935 return 1;
3937 return 0;
3940 /* Enter a case (Pascal) or switch (C) statement.
3941 Push a block onto case_stack and nesting_stack
3942 to accumulate the case-labels that are seen
3943 and to record the labels generated for the statement.
3945 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3946 Otherwise, this construct is transparent for `exit_something'.
3948 EXPR is the index-expression to be dispatched on.
3949 TYPE is its nominal type. We could simply convert EXPR to this type,
3950 but instead we take short cuts. */
3952 void
3953 expand_start_case (int exit_flag, tree expr, tree type,
3954 const char *printname)
3956 struct nesting *thiscase = ALLOC_NESTING ();
3958 /* Make an entry on case_stack for the case we are entering. */
3960 thiscase->desc = CASE_NESTING;
3961 thiscase->next = case_stack;
3962 thiscase->all = nesting_stack;
3963 thiscase->depth = ++nesting_depth;
3964 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3965 thiscase->data.case_stmt.case_list = 0;
3966 thiscase->data.case_stmt.index_expr = expr;
3967 thiscase->data.case_stmt.nominal_type = type;
3968 thiscase->data.case_stmt.default_label = 0;
3969 thiscase->data.case_stmt.printname = printname;
3970 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
3971 case_stack = thiscase;
3972 nesting_stack = thiscase;
3974 do_pending_stack_adjust ();
3975 emit_queue ();
3977 /* Make sure case_stmt.start points to something that won't
3978 need any transformation before expand_end_case. */
3979 if (GET_CODE (get_last_insn ()) != NOTE)
3980 emit_note (NOTE_INSN_DELETED)
3982 thiscase->data.case_stmt.start = get_last_insn ();
/* Code emitted before the first case label is conditional for
   cleanup purposes; matched by expand_end_case.  */
3984 start_cleanup_deferral ();
/* Called when the first case label of the current switch is seen:
   restores deferred line-number state and warns about any real insns
   (unreachable code) emitted between the switch head and this label.  */
3987 static void
3988 check_seenlabel (void)
3990 /* If this is the first label, warn if any insns have been emitted. */
3991 if (case_stack->data.case_stmt.line_number_status >= 0)
3993 rtx insn;
3995 restore_line_number_status
3996 (case_stack->data.case_stmt.line_number_status);
3997 case_stack->data.case_stmt.line_number_status = -1;
3999 for (insn = case_stack->data.case_stmt.start;
4000 insn;
4001 insn = NEXT_INSN (insn))
4003 if (GET_CODE (insn) == CODE_LABEL)
4004 break;
4005 if (GET_CODE (insn) != NOTE
4006 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
/* Found a real insn before any label: scan back to the nearest
   line-number note so the warning points at the right source line.  */
4009 insn = PREV_INSN (insn);
4010 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4012 /* If insn is zero, then there must have been a syntax error. */
4013 if (insn)
4015 location_t locus;
4016 locus.file = NOTE_SOURCE_FILE (insn);
4017 locus.line = NOTE_LINE_NUMBER (insn);
4018 warning ("%Hunreachable code at beginning of %s", &locus,
4019 case_stack->data.case_stmt.printname);
4021 break;
4027 /* Accumulate one case or default label inside a case or switch statement.
4028 VALUE is the value of the case (a null pointer, for a default label).
4029 The function CONVERTER, when applied to arguments T and V,
4030 converts the value V to the type T.
4032 If not currently inside a case or switch statement, return 1 and do
4033 nothing. The caller will print a language-specific error message.
4034 If VALUE is a duplicate or overlaps, return 2 and do nothing
4035 except store the (first) duplicate node in *DUPLICATE.
4036 If VALUE is out of range, return 3 and do nothing.
4037 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4038 Return 0 on success.
4040 Extended to handle range statements. */
4043 pushcase (tree value, tree (*converter) (tree, tree), tree label,
4044 tree *duplicate)
4046 tree index_type;
4047 tree nominal_type;
4049 /* Fail if not inside a real case statement. */
4050 if (! (case_stack && case_stack->data.case_stmt.start))
4051 return 1;
/* A deeper stack_block_stack means the label would jump into the
   scope of a cleanup or variable-sized object.  */
4053 if (stack_block_stack
4054 && stack_block_stack->depth > case_stack->depth)
4055 return 5;
4057 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4058 nominal_type = case_stack->data.case_stmt.nominal_type;
4060 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4061 if (index_type == error_mark_node)
4062 return 0;
4064 /* Convert VALUE to the type in which the comparisons are nominally done. */
4065 if (value != 0)
4066 value = (*converter) (nominal_type, value);
4068 check_seenlabel ();
4070 /* Fail if this value is out of range for the actual type of the index
4071 (which may be narrower than NOMINAL_TYPE). */
4072 if (value != 0
4073 && (TREE_CONSTANT_OVERFLOW (value)
4074 || ! int_fits_type_p (value, index_type)))
4075 return 3;
/* A single case label is a degenerate range [value, value].  */
4077 return add_case_node (value, value, label, duplicate, false);
/* Like pushcase but this case applies to all values between VALUE1 and
   VALUE2 (inclusive).  If VALUE1 is NULL, the range starts at the lowest
   value of the index type and ends at VALUE2.  If VALUE2 is NULL, the range
   starts at VALUE1 and ends at the highest value of the index type.
   If both are NULL, this case applies to all values.

   The return value is the same as that of pushcase but there is one
   additional error code: 4 means the specified range was empty.  */

int
pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
		tree label, tree *duplicate)
{
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  /* A deeper block nesting than the switch means the label would jump
     into the scope of a cleanup or variable-sized array.  */
  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  check_seenlabel ();

  /* Convert VALUEs to type in which the comparisons are nominally done
     and replace any unspecified value with the corresponding bound.  */
  if (value1 == 0)
    value1 = TYPE_MIN_VALUE (index_type);
  if (value2 == 0)
    value2 = TYPE_MAX_VALUE (index_type);

  /* Fail if the range is empty.  Do this before any conversion since
     we want to allow out-of-range empty ranges.  */
  if (value2 != 0 && tree_int_cst_lt (value2, value1))
    return 4;

  /* If the max was unbounded, use the max of the nominal_type we are
     converting to.  Do this after the < check above to suppress false
     positives.  */
  if (value2 == 0)
    value2 = TYPE_MAX_VALUE (nominal_type);

  value1 = (*converter) (nominal_type, value1);
  value2 = (*converter) (nominal_type, value2);

  /* Fail if these values are out of range.  */
  if (TREE_CONSTANT_OVERFLOW (value1)
      || ! int_fits_type_p (value1, index_type))
    return 3;

  if (TREE_CONSTANT_OVERFLOW (value2)
      || ! int_fits_type_p (value2, index_type))
    return 3;

  return add_case_node (value1, value2, label, duplicate, false);
}
/* Do the actual insertion of a case label for pushcase and pushcase_range
   into case_stack->data.case_stmt.case_list.  Use an AVL tree to avoid
   slowdown for large switch statements.

   LOW and HIGH bound the case range (LOW == HIGH for a single value,
   both null for the default label).  LABEL is the label to branch to;
   unless DONT_EXPAND_LABEL is set, it is emitted here via expand_label.
   On overlap, the clashing node's label is stored in *DUPLICATE and 2
   is returned; otherwise the return value is 0.  */

int
add_case_node (tree low, tree high, tree label, tree *duplicate,
	       bool dont_expand_label)
{
  struct case_node *p, **q, *r;

  /* If there's no HIGH value, then this is not a case range; it's
     just a simple case label.  But that's just a degenerate case
     range.  */
  if (!high)
    high = low;

  /* Handle default labels specially.  (Since HIGH was set to LOW above,
     this test fires exactly when LOW was null.)  */
  if (!high && !low)
    {
      if (case_stack->data.case_stmt.default_label != 0)
	{
	  *duplicate = case_stack->data.case_stmt.default_label;
	  return 2;
	}
      case_stack->data.case_stmt.default_label = label;
      if (!dont_expand_label)
	expand_label (label);
      return 0;
    }

  /* Descend the AVL tree looking for the insertion point; Q tracks the
     link to patch, P the prospective parent.  */
  q = &case_stack->data.case_stmt.case_list;
  p = *q;

  while ((r = *q))
    {
      p = r;

      /* Keep going past elements distinctly greater than HIGH.  */
      if (tree_int_cst_lt (high, p->low))
	q = &p->left;

      /* or distinctly less than LOW.  */
      else if (tree_int_cst_lt (p->high, low))
	q = &p->right;

      else
	{
	  /* We have an overlap; this is an error.  */
	  *duplicate = p->code_label;
	  return 2;
	}
    }

  /* Add this label to the chain, and succeed.  */

  r = ggc_alloc (sizeof (struct case_node));
  r->low = low;

  /* If the bounds are equal, turn this into the one-value case.  */
  if (tree_int_cst_equal (low, high))
    r->high = r->low;
  else
    r->high = high;

  r->code_label = label;
  if (!dont_expand_label)
    expand_label (label);

  *q = r;
  r->parent = p;
  r->left = 0;
  r->right = 0;
  r->balance = 0;

  /* Rebalance: walk back up from the new leaf, adjusting balance
     factors and performing at most one (possibly double) rotation.  */
  while (p)
    {
      struct case_node *s;

      if (r == p->left)
	{
	  int b;

	  if (! (b = p->balance))
	    /* Growth propagation from left side.  */
	    p->balance = -1;
	  else if (b < 0)
	    {
	      if (r->balance < 0)
		{
		  /* R-Rotation */
		  if ((p->left = s = r->right))
		    s->parent = p;

		  r->right = p;
		  p->balance = 0;
		  r->balance = 0;
		  s = p->parent;
		  p->parent = r;

		  if ((r->parent = s))
		    {
		      if (s->left == p)
			s->left = r;
		      else
			s->right = r;
		    }
		  else
		    case_stack->data.case_stmt.case_list = r;
		}
	      else
		/* r->balance == +1 */
		{
		  /* LR-Rotation */

		  int b2;
		  struct case_node *t = r->right;

		  if ((p->left = s = t->right))
		    s->parent = p;

		  t->right = p;
		  if ((r->right = s = t->left))
		    s->parent = r;

		  t->left = r;
		  b = t->balance;
		  b2 = b < 0;
		  p->balance = b2;
		  b2 = -b2 - b;
		  r->balance = b2;
		  t->balance = 0;
		  s = p->parent;
		  p->parent = t;
		  r->parent = t;

		  if ((t->parent = s))
		    {
		      if (s->left == p)
			s->left = t;
		      else
			s->right = t;
		    }
		  else
		    case_stack->data.case_stmt.case_list = t;
		}
	      break;
	    }
	  else
	    {
	      /* p->balance == +1; growth of left side balances the node.  */
	      p->balance = 0;
	      break;
	    }
	}
      else
	/* r == p->right */
	{
	  int b;

	  if (! (b = p->balance))
	    /* Growth propagation from right side.  */
	    p->balance++;
	  else if (b > 0)
	    {
	      if (r->balance > 0)
		{
		  /* L-Rotation */
		  if ((p->right = s = r->left))
		    s->parent = p;

		  r->left = p;
		  p->balance = 0;
		  r->balance = 0;
		  s = p->parent;
		  p->parent = r;
		  if ((r->parent = s))
		    {
		      if (s->left == p)
			s->left = r;
		      else
			s->right = r;
		    }
		  else
		    case_stack->data.case_stmt.case_list = r;
		}
	      else
		/* r->balance == -1 */
		{
		  /* RL-Rotation */
		  int b2;
		  struct case_node *t = r->left;

		  if ((p->right = s = t->left))
		    s->parent = p;

		  t->left = p;

		  if ((r->left = s = t->right))
		    s->parent = r;

		  t->right = r;
		  b = t->balance;
		  b2 = b < 0;
		  r->balance = b2;
		  b2 = -b2 - b;
		  p->balance = b2;
		  t->balance = 0;
		  s = p->parent;
		  p->parent = t;
		  r->parent = t;

		  if ((t->parent = s))
		    {
		      if (s->left == p)
			s->left = t;
		      else
			s->right = t;
		    }
		  else
		    case_stack->data.case_stmt.case_list = t;
		}
	      break;
	    }
	  else
	    {
	      /* p->balance == -1; growth of right side balances the node.  */
	      p->balance = 0;
	      break;
	    }
	}

      r = p;
      p = p->parent;
    }

  return 0;
}
4389 /* Maximum number of case bit tests. */
4390 #define MAX_CASE_BIT_TESTS 3
4392 /* By default, enable case bit tests on targets with ashlsi3. */
4393 #ifndef CASE_USE_BIT_TESTS
4394 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
4395 != CODE_FOR_nothing)
4396 #endif
/* A case_bit_test represents a set of case nodes that may be
   selected from using a bit-wise comparison.  HI and LO hold
   the integer to be tested against, LABEL contains the label
   to jump to upon success and BITS counts the number of case
   nodes handled by this test, typically the number of bits
   set in HI:LO.  */

struct case_bit_test
{
  HOST_WIDE_INT hi;	/* Upper word of the constant bit mask.  */
  HOST_WIDE_INT lo;	/* Lower word of the constant bit mask.  */
  rtx label;		/* Jump target shared by the grouped case nodes.  */
  int bits;		/* Number of case nodes folded into this test.  */
};
4414 /* Determine whether "1 << x" is relatively cheap in word_mode. */
4416 static
4417 bool lshift_cheap_p (void)
4419 static bool init = false;
4420 static bool cheap = true;
4422 if (!init)
4424 rtx reg = gen_rtx_REG (word_mode, 10000);
4425 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
4426 cheap = cost < COSTS_N_INSNS (3);
4427 init = true;
4430 return cheap;
4433 /* Comparison function for qsort to order bit tests by decreasing
4434 number of case nodes, i.e. the node with the most cases gets
4435 tested first. */
4437 static
4438 int case_bit_test_cmp (const void *p1, const void *p2)
4440 const struct case_bit_test *d1 = p1;
4441 const struct case_bit_test *d2 = p2;
4443 return d2->bits - d1->bits;
4446 /* Expand a switch statement by a short sequence of bit-wise
4447 comparisons. "switch(x)" is effectively converted into
4448 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
4449 integer constants.
4451 INDEX_EXPR is the value being switched on, which is of
4452 type INDEX_TYPE. MINVAL is the lowest case value of in
4453 the case nodes, of INDEX_TYPE type, and RANGE is highest
4454 value minus MINVAL, also of type INDEX_TYPE. NODES is
4455 the set of case nodes, and DEFAULT_LABEL is the label to
4456 branch to should none of the cases match.
4458 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
4459 node targets. */
4461 static void
4462 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
4463 tree range, case_node_ptr nodes, rtx default_label)
4465 struct case_bit_test test[MAX_CASE_BIT_TESTS];
4466 enum machine_mode mode;
4467 rtx expr, index, label;
4468 unsigned int i,j,lo,hi;
4469 struct case_node *n;
4470 unsigned int count;
4472 count = 0;
4473 for (n = nodes; n; n = n->right)
4475 label = label_rtx (n->code_label);
4476 for (i = 0; i < count; i++)
4477 if (same_case_target_p (label, test[i].label))
4478 break;
4480 if (i == count)
4482 if (count >= MAX_CASE_BIT_TESTS)
4483 abort ();
4484 test[i].hi = 0;
4485 test[i].lo = 0;
4486 test[i].label = label;
4487 test[i].bits = 1;
4488 count++;
4490 else
4491 test[i].bits++;
4493 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4494 n->low, minval)), 1);
4495 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4496 n->high, minval)), 1);
4497 for (j = lo; j <= hi; j++)
4498 if (j >= HOST_BITS_PER_WIDE_INT)
4499 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
4500 else
4501 test[i].lo |= (HOST_WIDE_INT) 1 << j;
4504 qsort (test, count, sizeof(*test), case_bit_test_cmp);
4506 index_expr = fold (build (MINUS_EXPR, index_type,
4507 convert (index_type, index_expr),
4508 convert (index_type, minval)));
4509 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4510 emit_queue ();
4511 index = protect_from_queue (index, 0);
4512 do_pending_stack_adjust ();
4514 mode = TYPE_MODE (index_type);
4515 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
4516 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
4517 default_label);
4519 index = convert_to_mode (word_mode, index, 0);
4520 index = expand_binop (word_mode, ashl_optab, const1_rtx,
4521 index, NULL_RTX, 1, OPTAB_WIDEN);
4523 for (i = 0; i < count; i++)
4525 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
4526 expr = expand_binop (word_mode, and_optab, index, expr,
4527 NULL_RTX, 1, OPTAB_WIDEN);
4528 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
4529 word_mode, 1, test[i].label);
4532 emit_jump (default_label);
4535 #ifndef HAVE_casesi
4536 #define HAVE_casesi 0
4537 #endif
4539 #ifndef HAVE_tablejump
4540 #define HAVE_tablejump 0
4541 #endif
/* Terminate a case (Pascal) or switch (C) statement
   in which ORIG_INDEX is the expression to be tested.
   If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
   type as given in the source before any compiler conversions.
   Generate the code to test it and jump to the right place.

   Chooses among three dispatch strategies: bit-mask tests (few unique
   targets, narrow range), a binary decision tree (sparse or constant
   index), or a jump table (casesi/tablejump).  */

void
expand_end_case_type (tree orig_index, tree orig_type)
{
  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
  rtx default_label = 0;
  struct case_node *n, *m;
  unsigned int count, uniq;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  int i;
  rtx before_case, end, lab;
  struct nesting *thiscase = case_stack;
  tree index_expr, index_type;
  bool exit_done = false;
  int unsignedp;

  /* Don't crash due to previous errors.  */
  if (thiscase == NULL)
    return;

  index_expr = thiscase->data.case_stmt.index_expr;
  index_type = TREE_TYPE (index_expr);
  unsignedp = TYPE_UNSIGNED (index_type);
  if (orig_type == NULL)
    orig_type = TREE_TYPE (orig_index);

  do_pending_stack_adjust ();

  /* This might get a spurious warning in the presence of a syntax error;
     it could be fixed by moving the call to check_seenlabel after the
     check for error_mark_node, and copying the code of check_seenlabel that
     deals with case_stack->data.case_stmt.line_number_status /
     restore_line_number_status in front of the call to end_cleanup_deferral;
     However, this might miss some useful warnings in the presence of
     non-syntax errors.  */
  check_seenlabel ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (index_type != error_mark_node)
    {
      /* If we don't have a default-label, create one here,
	 after the body of the switch.  */
      if (thiscase->data.case_stmt.default_label == 0)
	{
	  thiscase->data.case_stmt.default_label
	    = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	  /* Share the exit label if possible.  */
	  if (thiscase->exit_label)
	    {
	      SET_DECL_RTL (thiscase->data.case_stmt.default_label,
			    thiscase->exit_label);
	      exit_done = true;
	    }
	  expand_label (thiscase->data.case_stmt.default_label);
	}
      default_label = label_rtx (thiscase->data.case_stmt.default_label);

      before_case = get_last_insn ();

      /* Flatten the AVL tree built by add_case_node into a sorted
	 right-linked list (a node with a left child implies a tree).  */
      if (thiscase->data.case_stmt.case_list
	  && thiscase->data.case_stmt.case_list->left)
	thiscase->data.case_stmt.case_list
	  = case_tree2list (thiscase->data.case_stmt.case_list, 0);

      /* Simplify the case-list before we count it.  */
      group_case_nodes (thiscase->data.case_stmt.case_list);
      strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
				default_label);

      /* Get upper and lower bounds of case values.
	 Also convert all the case values to the index expr's data type.  */

      uniq = 0;
      count = 0;
      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	{
	  /* Check low and high label values are integers.  */
	  if (TREE_CODE (n->low) != INTEGER_CST)
	    abort ();
	  if (TREE_CODE (n->high) != INTEGER_CST)
	    abort ();

	  n->low = convert (index_type, n->low);
	  n->high = convert (index_type, n->high);

	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */
	  if (count++ == 0)
	    {
	      minval = n->low;
	      maxval = n->high;
	    }
	  else
	    {
	      if (INT_CST_LT (n->low, minval))
		minval = n->low;
	      if (INT_CST_LT (maxval, n->high))
		maxval = n->high;
	    }
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;

	  /* Count the number of unique case node targets.  */
	  uniq++;
	  lab = label_rtx (n->code_label);
	  for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
	    if (same_case_target_p (label_rtx (m->code_label), lab))
	      {
		uniq--;
		break;
	      }
	}

      /* Compute span of values.  */
      if (count != 0)
	range = fold (build (MINUS_EXPR, index_type, maxval, minval));

      end_cleanup_deferral ();

      if (count == 0)
	{
	  /* No cases at all: evaluate the index for side effects only,
	     then jump straight to the default.  */
	  expand_expr (index_expr, const0_rtx, VOIDmode, 0);
	  emit_queue ();
	  emit_jump (default_label);
	}

      /* Try implementing this switch statement by a short sequence of
	 bit-wise comparisons.  However, we let the binary-tree case
	 below handle constant index expressions.  */
      else if (CASE_USE_BIT_TESTS
	       && ! TREE_CONSTANT (index_expr)
	       && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
	       && compare_tree_int (range, 0) > 0
	       && lshift_cheap_p ()
	       && ((uniq == 1 && count >= 3)
		   || (uniq == 2 && count >= 5)
		   || (uniq == 3 && count >= 6)))
	{
	  /* Optimize the case where all the case values fit in a
	     word without having to subtract MINVAL.  In this case,
	     we can optimize away the subtraction.  */
	  if (compare_tree_int (minval, 0) > 0
	      && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
	    {
	      minval = integer_zero_node;
	      range = maxval;
	    }
	  emit_case_bit_tests (index_type, index_expr, minval, range,
			       thiscase->data.case_stmt.case_list,
			       default_label);
	}

      /* If range of values is much bigger than number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */

      else if (count < case_values_threshold ()
	       || compare_tree_int (range,
				    (optimize_size ? 3 : 10) * count) > 0
	       /* RANGE may be signed, and really large ranges will show up
		  as negative numbers.  */
	       || compare_tree_int (range, 0) < 0
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
	       || flag_pic
#endif
	       || TREE_CONSTANT (index_expr)
	       /* If neither casesi or tablejump is available, we can
		  only go this way.  */
	       || (!HAVE_casesi && !HAVE_tablejump))
	{
	  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

	  /* If the index is a short or char that we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && ! have_insn_for (COMPARE, GET_MODE (index)))
	    {
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (have_insn_for (COMPARE, wider_mode))
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  emit_queue ();
	  do_pending_stack_adjust ();

	  index = protect_from_queue (index, 0);
	  if (GET_CODE (index) == MEM)
	    index = copy_to_reg (index);
	  if (GET_CODE (index) == CONST_INT
	      || TREE_CODE (index_expr) == INTEGER_CST)
	    {
	      /* Make a tree node with the proper constant value
		 if we don't already have one.  */
	      if (TREE_CODE (index_expr) != INTEGER_CST)
		{
		  index_expr
		    = build_int_2 (INTVAL (index),
				   unsignedp || INTVAL (index) >= 0 ? 0 : -1);
		  index_expr = convert (index_type, index_expr);
		}

	      /* For constant index expressions we need only
		 issue an unconditional branch to the appropriate
		 target code.  The job of removing any unreachable
		 code is left to the optimization phase if the
		 "-O" option is specified.  */
	      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
		if (! tree_int_cst_lt (index_expr, n->low)
		    && ! tree_int_cst_lt (n->high, index_expr))
		  break;

	      if (n)
		emit_jump (label_rtx (n->code_label));
	      else
		emit_jump (default_label);
	    }
	  else
	    {
	      /* If the index expression is not constant we generate
		 a binary decision tree to select the appropriate
		 target code.  This is done as follows:

		 The list of cases is rearranged into a binary tree,
		 nearly optimal assuming equal probability for each case.

		 The tree is transformed into RTL, eliminating
		 redundant test conditions at the same time.

		 If program flow could reach the end of the
		 decision tree an unconditional jump to the
		 default code is emitted.  */

	      use_cost_table
		= (TREE_CODE (orig_type) != ENUMERAL_TYPE
		   && estimate_case_costs (thiscase->data.case_stmt.case_list));
	      balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
	      emit_case_nodes (index, thiscase->data.case_stmt.case_list,
			       default_label, index_type);
	      emit_jump_if_reachable (default_label);
	    }
	}
      else
	{
	  /* Jump-table dispatch, via the casesi or tablejump patterns.  */
	  table_label = gen_label_rtx ();
	  if (! try_casesi (index_type, index_expr, minval, range,
			    table_label, default_label))
	    {
	      index_type = thiscase->data.case_stmt.nominal_type;

	      /* Index jumptables from zero for suitable values of
		 minval to avoid a subtraction.  */
	      if (! optimize_size
		  && compare_tree_int (minval, 0) > 0
		  && compare_tree_int (minval, 3) < 0)
		{
		  minval = integer_zero_node;
		  range = maxval;
		}

	      if (! try_tablejump (index_type, index_expr, minval, range,
				   table_label, default_label))
		abort ();
	    }

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = tree_low_cst (range, 0) + 1;
	  labelvec = alloca (ncases * sizeof (rtx));
	  memset (labelvec, 0, ncases * sizeof (rtx));

	  for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	    {
	      /* Compute the low and high bounds relative to the minimum
		 value since that should fit in a HOST_WIDE_INT while the
		 actual values may not.  */
	      HOST_WIDE_INT i_low
		= tree_low_cst (fold (build (MINUS_EXPR, index_type,
					     n->low, minval)), 1);
	      HOST_WIDE_INT i_high
		= tree_low_cst (fold (build (MINUS_EXPR, index_type,
					     n->high, minval)), 1);
	      HOST_WIDE_INT i;

	      for (i = i_low; i <= i_high; i ++)
		labelvec[i]
		  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
	    }

	  /* Fill in the gaps with the default.  */
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);

	  /* Output the table.  */
	  emit_label (table_label);

	  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
	    emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
						   gen_rtx_LABEL_REF (Pmode, table_label),
						   gen_rtvec_v (ncases, labelvec),
						   const0_rtx, const0_rtx));
	  else
	    emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
					      gen_rtvec_v (ncases, labelvec)));

	  /* If the case insn drops through the table,
	     after the table we must jump to the default-label.
	     Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
	  emit_jump (default_label);
#else
	  emit_barrier ();
#endif
	}

      /* Move the dispatch code emitted above to before the switch body.  */
      before_case = NEXT_INSN (before_case);
      end = get_last_insn ();
      if (squeeze_notes (&before_case, &end))
	abort ();
      reorder_insns (before_case, end,
		     thiscase->data.case_stmt.start);
    }
  else
    end_cleanup_deferral ();

  if (thiscase->exit_label && !exit_done)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);

  free_temp_slots ();
}
4894 /* Convert the tree NODE into a list linked by the right field, with the left
4895 field zeroed. RIGHT is used for recursion; it is a list to be placed
4896 rightmost in the resulting list. */
4898 static struct case_node *
4899 case_tree2list (struct case_node *node, struct case_node *right)
4901 struct case_node *left;
4903 if (node->right)
4904 right = case_tree2list (node->right, right);
4906 node->right = right;
4907 if ((left = node->left))
4909 node->left = 0;
4910 return case_tree2list (left, node);
4913 return node;
4916 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
4918 static void
4919 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
4921 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
4923 if (op1 == op2)
4924 emit_jump (label);
4926 else
4927 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
4928 (GET_MODE (op1) == VOIDmode
4929 ? GET_MODE (op2) : GET_MODE (op1)),
4930 unsignedp, label);
/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

	chars above space:	16
	digits:			16
	default:		12
	space, punct:		8
	tab:			4
	newline:		2
	other "\" chars:	1
	remaining chars:	0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */

static int
estimate_case_costs (case_node_ptr node)
{
  tree min_ascii = integer_minus_one_node;
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (! cost_table_initialized)
    {
      cost_table_initialized = 1;

      for (i = 0; i < 128; i++)
	{
	  if (ISALNUM (i))
	    COST_TABLE (i) = 16;
	  else if (ISPUNCT (i))
	    COST_TABLE (i) = 8;
	  else if (ISCNTRL (i))
	    /* -1 marks "strange" control characters; their presence in a
	       case list disqualifies the whole switch below.  */
	    COST_TABLE (i) = -1;
	}

      /* Override the common escape characters with their own weights.  */
      COST_TABLE (' ') = 8;
      COST_TABLE ('\t') = 4;
      COST_TABLE ('\0') = 4;
      COST_TABLE ('\n') = 2;
      COST_TABLE ('\f') = 1;
      COST_TABLE ('\v') = 1;
      COST_TABLE ('\b') = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
	return 0;

      for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
	   i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
	if (COST_TABLE (i) < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}
/* Determine whether two case labels branch to the same target.
   Currently a simple pointer comparison; the disabled code below
   used to follow simple jumps to their ultimate destination.  */

static bool
same_case_target_p (rtx l1, rtx l2)
{
#if 0
  rtx i1, i2;

  if (l1 == l2)
    return true;

  i1 = next_real_insn (l1);
  i2 = next_real_insn (l2);
  if (i1 == i2)
    return true;

  if (i1 && simplejump_p (i1))
    {
      l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
    }

  if (i2 && simplejump_p (i2))
    {
      l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
    }
#endif
  /* When coming from gimple, we usually won't have emitted either
     the labels or the body of the switch statement.  The job being
     done here should be done via jump threading at the tree level.
     Cases that go the same place should have the same label.  */
  return l1 == l2;
}
5045 /* Delete nodes that branch to the default label from a list of
5046 case nodes. Eg. case 5: default: becomes just default: */
5048 static void
5049 strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
5051 case_node_ptr ptr;
5053 while (*prev)
5055 ptr = *prev;
5056 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
5057 *prev = ptr->right;
5058 else
5059 prev = &ptr->right;
/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (case_node_ptr head)
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lab;
      case_node_ptr np = node;

      lab = label_rtx (node->code_label);

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
	     /* Do they jump to the same place?  */
	     && same_case_target_p (label_rtx (np->code_label), lab)
	     /* Are their ranges consecutive?  */
	     && tree_int_cst_equal (np->low,
				    fold (build (PLUS_EXPR,
						 TREE_TYPE (node->high),
						 node->high,
						 integer_one_node)))
	     /* An overflow is not consecutive.  */
	     && tree_int_cst_lt (node->high,
				 fold (build (PLUS_EXPR,
					      TREE_TYPE (node->high),
					      node->high,
					      integer_one_node))))
	/* Absorb NP's range into NODE's.  */
	node->high = np->high;

      /* NP is the first node after NODE which can't be grouped with it.
	 Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      /* A range is costed at both endpoints.  */
	      if (use_cost_table)
		cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
	    }

	  if (use_cost_table)
	    cost += COST_TABLE (TREE_INT_CST_LOW (np->low));

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
		  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  /* *NPP becomes the pivot: everything before it (starting at LEFT)
	     hangs off its left link, everything after stays on the right.  */
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.

   Returns nonzero if the low-bound test is provably redundant.  */

static int
node_has_low_bound (case_node_ptr node, tree index_type)
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}
/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.

   Returns nonzero if the high-bound test is provably redundant.  */

static int
node_has_high_bound (case_node_ptr node, tree index_type)
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If there is no upper bound, obviously no test is needed.  */

  if (TYPE_MAX_VALUE (index_type) == NULL)
    return 1;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
			       node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}
5318 /* Search the parent sections of the
5319 case node tree to see if both tests for the upper and lower
5320 bounds of NODE would be redundant. */
5322 static int
5323 node_is_bounded (case_node_ptr node, tree index_type)
5325 return (node_has_low_bound (node, index_type)
5326 && node_has_high_bound (node, index_type));
5329 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5331 static void
5332 emit_jump_if_reachable (rtx label)
5334 if (GET_CODE (get_last_insn ()) != BARRIER)
5335 emit_jump (label);
5338 /* Emit step-by-step code to select a case for the value of INDEX.
5339 The thus generated decision tree follows the form of the
5340 case-node binary tree NODE, whose nodes represent test conditions.
5341 INDEX_TYPE is the type of the index of the switch.
5343 Care is taken to prune redundant tests from the decision tree
5344 by detecting any boundary conditions already checked by
5345 emitted rtx. (See node_has_high_bound, node_has_low_bound
5346 and node_is_bounded, above.)
5348 Where the test conditions can be shown to be redundant we emit
5349 an unconditional jump to the target code. As a further
5350 optimization, the subordinates of a tree node are examined to
5351 check for bounded nodes. In this case conditional and/or
5352 unconditional jumps as a result of the boundary check for the
5353 current node are arranged to target the subordinates associated
5354 code for out of bound conditions on the current node.
5356 We can assume that when control reaches the code generated here,
5357 the index value has already been compared with the parents
5358 of this node, and determined to be on the same side of each parent
5359 as this node is. Thus, if this node tests for the value 51,
5360 and a parent tested for 52, we don't need to consider
5361 the possibility of a value greater than 51. If another parent
5362 tests for the value 50, then this node need not test anything. */
5364 static void
5365 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
5366 tree index_type)
5368 /* If INDEX has an unsigned type, we must make unsigned branches. */
5369 int unsignedp = TYPE_UNSIGNED (index_type);
5370 enum machine_mode mode = GET_MODE (index);
5371 enum machine_mode imode = TYPE_MODE (index_type);
5373 /* See if our parents have already tested everything for us.
5374 If they have, emit an unconditional jump for this node. */
5375 if (node_is_bounded (node, index_type))
5376 emit_jump (label_rtx (node->code_label));
5378 else if (tree_int_cst_equal (node->low, node->high))
5380 /* Node is single valued. First see if the index expression matches
5381 this node and then check our children, if any. */
5383 do_jump_if_equal (index,
5384 convert_modes (mode, imode,
5385 expand_expr (node->low, NULL_RTX,
5386 VOIDmode, 0),
5387 unsignedp),
5388 label_rtx (node->code_label), unsignedp);
5390 if (node->right != 0 && node->left != 0)
5392 /* This node has children on both sides.
5393 Dispatch to one side or the other
5394 by comparing the index value with this node's value.
5395 If one subtree is bounded, check that one first,
5396 so we can avoid real branches in the tree. */
5398 if (node_is_bounded (node->right, index_type))
5400 emit_cmp_and_jump_insns (index,
5401 convert_modes
5402 (mode, imode,
5403 expand_expr (node->high, NULL_RTX,
5404 VOIDmode, 0),
5405 unsignedp),
5406 GT, NULL_RTX, mode, unsignedp,
5407 label_rtx (node->right->code_label));
5408 emit_case_nodes (index, node->left, default_label, index_type);
5411 else if (node_is_bounded (node->left, index_type))
5413 emit_cmp_and_jump_insns (index,
5414 convert_modes
5415 (mode, imode,
5416 expand_expr (node->high, NULL_RTX,
5417 VOIDmode, 0),
5418 unsignedp),
5419 LT, NULL_RTX, mode, unsignedp,
5420 label_rtx (node->left->code_label));
5421 emit_case_nodes (index, node->right, default_label, index_type);
5424 /* If both children are single-valued cases with no
5425 children, finish up all the work. This way, we can save
5426 one ordered comparison. */
5427 else if (tree_int_cst_equal (node->right->low, node->right->high)
5428 && node->right->left == 0
5429 && node->right->right == 0
5430 && tree_int_cst_equal (node->left->low, node->left->high)
5431 && node->left->left == 0
5432 && node->left->right == 0)
5434 /* Neither node is bounded. First distinguish the two sides;
5435 then emit the code for one side at a time. */
5437 /* See if the value matches what the right hand side
5438 wants. */
5439 do_jump_if_equal (index,
5440 convert_modes (mode, imode,
5441 expand_expr (node->right->low,
5442 NULL_RTX,
5443 VOIDmode, 0),
5444 unsignedp),
5445 label_rtx (node->right->code_label),
5446 unsignedp);
5448 /* See if the value matches what the left hand side
5449 wants. */
5450 do_jump_if_equal (index,
5451 convert_modes (mode, imode,
5452 expand_expr (node->left->low,
5453 NULL_RTX,
5454 VOIDmode, 0),
5455 unsignedp),
5456 label_rtx (node->left->code_label),
5457 unsignedp);
5460 else
5462 /* Neither node is bounded. First distinguish the two sides;
5463 then emit the code for one side at a time. */
5465 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5467 /* See if the value is on the right. */
5468 emit_cmp_and_jump_insns (index,
5469 convert_modes
5470 (mode, imode,
5471 expand_expr (node->high, NULL_RTX,
5472 VOIDmode, 0),
5473 unsignedp),
5474 GT, NULL_RTX, mode, unsignedp,
5475 label_rtx (test_label));
5477 /* Value must be on the left.
5478 Handle the left-hand subtree. */
5479 emit_case_nodes (index, node->left, default_label, index_type);
5480 /* If left-hand subtree does nothing,
5481 go to default. */
5482 emit_jump_if_reachable (default_label);
5484 /* Code branches here for the right-hand subtree. */
5485 expand_label (test_label);
5486 emit_case_nodes (index, node->right, default_label, index_type);
5490 else if (node->right != 0 && node->left == 0)
5492 /* Here we have a right child but no left so we issue conditional
5493 branch to default and process the right child.
5495 Omit the conditional branch to default if it would avoid only one
5496 right child; it costs too much space to save so little time. */
5498 if (node->right->right || node->right->left
5499 || !tree_int_cst_equal (node->right->low, node->right->high))
5501 if (!node_has_low_bound (node, index_type))
5503 emit_cmp_and_jump_insns (index,
5504 convert_modes
5505 (mode, imode,
5506 expand_expr (node->high, NULL_RTX,
5507 VOIDmode, 0),
5508 unsignedp),
5509 LT, NULL_RTX, mode, unsignedp,
5510 default_label);
5513 emit_case_nodes (index, node->right, default_label, index_type);
5515 else
5516 /* We cannot process node->right normally
5517 since we haven't ruled out the numbers less than
5518 this node's value. So handle node->right explicitly. */
5519 do_jump_if_equal (index,
5520 convert_modes
5521 (mode, imode,
5522 expand_expr (node->right->low, NULL_RTX,
5523 VOIDmode, 0),
5524 unsignedp),
5525 label_rtx (node->right->code_label), unsignedp);
5528 else if (node->right == 0 && node->left != 0)
5530 /* Just one subtree, on the left. */
5531 if (node->left->left || node->left->right
5532 || !tree_int_cst_equal (node->left->low, node->left->high))
5534 if (!node_has_high_bound (node, index_type))
5536 emit_cmp_and_jump_insns (index,
5537 convert_modes
5538 (mode, imode,
5539 expand_expr (node->high, NULL_RTX,
5540 VOIDmode, 0),
5541 unsignedp),
5542 GT, NULL_RTX, mode, unsignedp,
5543 default_label);
5546 emit_case_nodes (index, node->left, default_label, index_type);
5548 else
5549 /* We cannot process node->left normally
5550 since we haven't ruled out the numbers greater than
5551 this node's value. So handle node->left explicitly. */
5552 do_jump_if_equal (index,
5553 convert_modes
5554 (mode, imode,
5555 expand_expr (node->left->low, NULL_RTX,
5556 VOIDmode, 0),
5557 unsignedp),
5558 label_rtx (node->left->code_label), unsignedp);
5561 else
5563 /* Node is a range. These cases are very similar to those for a single
5564 value, except that we do not start by testing whether this node
5565 is the one to branch to. */
5567 if (node->right != 0 && node->left != 0)
5569 /* Node has subtrees on both sides.
5570 If the right-hand subtree is bounded,
5571 test for it first, since we can go straight there.
5572 Otherwise, we need to make a branch in the control structure,
5573 then handle the two subtrees. */
5574 tree test_label = 0;
5576 if (node_is_bounded (node->right, index_type))
5577 /* Right hand node is fully bounded so we can eliminate any
5578 testing and branch directly to the target code. */
5579 emit_cmp_and_jump_insns (index,
5580 convert_modes
5581 (mode, imode,
5582 expand_expr (node->high, NULL_RTX,
5583 VOIDmode, 0),
5584 unsignedp),
5585 GT, NULL_RTX, mode, unsignedp,
5586 label_rtx (node->right->code_label));
5587 else
5589 /* Right hand node requires testing.
5590 Branch to a label where we will handle it later. */
5592 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5593 emit_cmp_and_jump_insns (index,
5594 convert_modes
5595 (mode, imode,
5596 expand_expr (node->high, NULL_RTX,
5597 VOIDmode, 0),
5598 unsignedp),
5599 GT, NULL_RTX, mode, unsignedp,
5600 label_rtx (test_label));
5603 /* Value belongs to this node or to the left-hand subtree. */
5605 emit_cmp_and_jump_insns (index,
5606 convert_modes
5607 (mode, imode,
5608 expand_expr (node->low, NULL_RTX,
5609 VOIDmode, 0),
5610 unsignedp),
5611 GE, NULL_RTX, mode, unsignedp,
5612 label_rtx (node->code_label));
5614 /* Handle the left-hand subtree. */
5615 emit_case_nodes (index, node->left, default_label, index_type);
5617 /* If right node had to be handled later, do that now. */
5619 if (test_label)
5621 /* If the left-hand subtree fell through,
5622 don't let it fall into the right-hand subtree. */
5623 emit_jump_if_reachable (default_label);
5625 expand_label (test_label);
5626 emit_case_nodes (index, node->right, default_label, index_type);
5630 else if (node->right != 0 && node->left == 0)
5632 /* Deal with values to the left of this node,
5633 if they are possible. */
5634 if (!node_has_low_bound (node, index_type))
5636 emit_cmp_and_jump_insns (index,
5637 convert_modes
5638 (mode, imode,
5639 expand_expr (node->low, NULL_RTX,
5640 VOIDmode, 0),
5641 unsignedp),
5642 LT, NULL_RTX, mode, unsignedp,
5643 default_label);
5646 /* Value belongs to this node or to the right-hand subtree. */
5648 emit_cmp_and_jump_insns (index,
5649 convert_modes
5650 (mode, imode,
5651 expand_expr (node->high, NULL_RTX,
5652 VOIDmode, 0),
5653 unsignedp),
5654 LE, NULL_RTX, mode, unsignedp,
5655 label_rtx (node->code_label));
5657 emit_case_nodes (index, node->right, default_label, index_type);
5660 else if (node->right == 0 && node->left != 0)
5662 /* Deal with values to the right of this node,
5663 if they are possible. */
5664 if (!node_has_high_bound (node, index_type))
5666 emit_cmp_and_jump_insns (index,
5667 convert_modes
5668 (mode, imode,
5669 expand_expr (node->high, NULL_RTX,
5670 VOIDmode, 0),
5671 unsignedp),
5672 GT, NULL_RTX, mode, unsignedp,
5673 default_label);
5676 /* Value belongs to this node or to the left-hand subtree. */
5678 emit_cmp_and_jump_insns (index,
5679 convert_modes
5680 (mode, imode,
5681 expand_expr (node->low, NULL_RTX,
5682 VOIDmode, 0),
5683 unsignedp),
5684 GE, NULL_RTX, mode, unsignedp,
5685 label_rtx (node->code_label));
5687 emit_case_nodes (index, node->left, default_label, index_type);
5690 else
5692 /* Node has no children so we check low and high bounds to remove
5693 redundant tests. Only one of the bounds can exist,
5694 since otherwise this node is bounded--a case tested already. */
5695 int high_bound = node_has_high_bound (node, index_type);
5696 int low_bound = node_has_low_bound (node, index_type);
5698 if (!high_bound && low_bound)
5700 emit_cmp_and_jump_insns (index,
5701 convert_modes
5702 (mode, imode,
5703 expand_expr (node->high, NULL_RTX,
5704 VOIDmode, 0),
5705 unsignedp),
5706 GT, NULL_RTX, mode, unsignedp,
5707 default_label);
5710 else if (!low_bound && high_bound)
5712 emit_cmp_and_jump_insns (index,
5713 convert_modes
5714 (mode, imode,
5715 expand_expr (node->low, NULL_RTX,
5716 VOIDmode, 0),
5717 unsignedp),
5718 LT, NULL_RTX, mode, unsignedp,
5719 default_label);
5721 else if (!low_bound && !high_bound)
5723 /* Widen LOW and HIGH to the same width as INDEX. */
5724 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
5725 tree low = build1 (CONVERT_EXPR, type, node->low);
5726 tree high = build1 (CONVERT_EXPR, type, node->high);
5727 rtx low_rtx, new_index, new_bound;
5729 /* Instead of doing two branches, emit one unsigned branch for
5730 (index-low) > (high-low). */
5731 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
5732 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
5733 NULL_RTX, unsignedp,
5734 OPTAB_WIDEN);
5735 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
5736 high, low)),
5737 NULL_RTX, mode, 0);
5739 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
5740 mode, 1, default_label);
5743 emit_jump (label_rtx (node->code_label));
5748 #include "gt-stmt.h"