* regclass.c (choose_hard_reg_mode): Add third argument.
[official-gcc.git] / gcc / stmt.c
blob334e751bcde35319bb07ee9d646023e81c3cf2e9
1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "tm_p.h"
44 #include "flags.h"
45 #include "except.h"
46 #include "function.h"
47 #include "insn-config.h"
48 #include "expr.h"
49 #include "libfuncs.h"
50 #include "hard-reg-set.h"
51 #include "loop.h"
52 #include "recog.h"
53 #include "machmode.h"
54 #include "toplev.h"
55 #include "output.h"
56 #include "ggc.h"
57 #include "langhooks.h"
58 #include "predict.h"
59 #include "optabs.h"
61 /* Assume that case vectors are not pc-relative. */
/* Provide a default when the target headers have not defined it.  */
62 #ifndef CASE_VECTOR_PC_RELATIVE
63 #define CASE_VECTOR_PC_RELATIVE 0
64 #endif
66 /* Functions and data structures for expanding case statements. */
68 /* Case label structure, used to hold info on labels within case
69 statements. We handle "range" labels; for a single-value label
70 as in C, the high and low limits are the same.
72 An AVL tree of case nodes is initially created, and later transformed
73 to a list linked via the RIGHT fields in the nodes. Nodes with
74 higher case values are later in the list.
76 Switch statements can be output in one of two forms. A branch table
77 is used if there are more than a few labels and the labels are dense
78 within the range between the smallest and largest case value. If a
79 branch table is used, no further manipulations are done with the case
80 node chain.
82 The alternative to the use of a branch table is to generate a series
83 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
84 and PARENT fields to hold a binary tree. Initially the tree is
85 totally unbalanced, with everything on the right. We balance the tree
86 with nodes on the left having lower case values than the parent
87 and nodes on the right having higher values. We then output the tree
88 in order. */
90 struct case_node GTY(())
92 struct case_node *left; /* Left son in binary tree */
93 struct case_node *right; /* Right son in binary tree; also node chain */
94 struct case_node *parent; /* Parent of node in binary tree */
95 tree low; /* Lowest index value for this label */
96 tree high; /* Highest index value for this label */
97 tree code_label; /* Label to jump to when node matches */
/* Balance factor while the nodes are kept as an AVL tree (see the
   comment above describing the AVL phase).  */
98 int balance;
101 typedef struct case_node case_node;
102 typedef struct case_node *case_node_ptr;
104 /* These are used by estimate_case_costs and balance_case_nodes. */
106 /* This must be a signed type, and non-ANSI compilers lack signed char. */
107 static short cost_table_[129];
108 static int use_cost_table;
109 static int cost_table_initialized;
111 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
112 is unsigned. */
/* The index is shifted by 1 so that COST_TABLE (-1) is a valid access.  */
113 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
115 /* Stack of control and binding constructs we are currently inside.
117 These constructs begin when you call `expand_start_WHATEVER'
118 and end when you call `expand_end_WHATEVER'. This stack records
119 info about how the construct began that tells the end-function
120 what to do. It also may provide information about the construct
121 to alter the behavior of other constructs within the body.
122 For example, they may affect the behavior of C `break' and `continue'.
124 Each construct gets one `struct nesting' object.
125 All of these objects are chained through the `all' field.
126 `nesting_stack' points to the first object (innermost construct).
127 The position of an entry on `nesting_stack' is in its `depth' field.
129 Each type of construct has its own individual stack.
130 For example, loops have `loop_stack'. Each object points to the
131 next object of the same type through the `next' field.
133 Some constructs are visible to `break' exit-statements and others
134 are not. Which constructs are visible depends on the language.
135 Therefore, the data structure allows each construct to be visible
136 or not, according to the args given when the construct is started.
137 The construct is visible if the `exit_label' field is non-null.
138 In that case, the value should be a CODE_LABEL rtx. */
140 struct nesting GTY(())
/* Chain of all nesting objects, innermost construct first.  */
142 struct nesting *all;
/* Next object of the same construct type (the per-type stacks).  */
143 struct nesting *next;
/* Position of this entry on `nesting_stack'.  */
144 int depth;
/* Non-null iff this construct is visible to `break'-style exits;
   in that case it is a CODE_LABEL rtx (see comment above).  */
145 rtx exit_label;
/* Discriminator selecting which member of the union below is active.  */
146 enum nesting_desc {
147 COND_NESTING,
148 LOOP_NESTING,
149 BLOCK_NESTING,
150 CASE_NESTING
151 } desc;
152 union nesting_u
154 /* For conds (if-then and if-then-else statements). */
155 struct nesting_cond
157 /* Label for the end of the if construct.
158 There is none if EXITFLAG was not set
159 and no `else' has been seen yet. */
160 rtx endif_label;
161 /* Label for the end of this alternative.
162 This may be the end of the if or the next else/elseif. */
163 rtx next_label;
164 } GTY ((tag ("COND_NESTING"))) cond;
165 /* For loops. */
166 struct nesting_loop
168 /* Label at the top of the loop; place to loop back to. */
169 rtx start_label;
170 /* Label at the end of the whole construct. */
171 rtx end_label;
172 /* Label for `continue' statement to jump to;
173 this is in front of the stepper of the loop. */
174 rtx continue_label;
175 } GTY ((tag ("LOOP_NESTING"))) loop;
176 /* For variable binding contours. */
177 struct nesting_block
179 /* Sequence number of this binding contour within the function,
180 in order of entry. */
181 int block_start_count;
182 /* Nonzero => value to restore stack to on exit. */
183 rtx stack_level;
184 /* The NOTE that starts this contour.
185 Used by expand_goto to check whether the destination
186 is within each contour or not. */
187 rtx first_insn;
188 /* Innermost containing binding contour that has a stack level. */
189 struct nesting *innermost_stack_block;
190 /* List of cleanups to be run on exit from this contour.
191 This is a list of expressions to be evaluated.
192 The TREE_PURPOSE of each link is the ..._DECL node
193 which the cleanup pertains to. */
194 tree cleanups;
195 /* List of cleanup-lists of blocks containing this block,
196 as they were at the locus where this block appears.
197 There is an element for each containing block,
198 ordered innermost containing block first.
199 The tail of this list can be 0,
200 if all remaining elements would be empty lists.
201 The element's TREE_VALUE is the cleanup-list of that block,
202 which may be null. */
203 tree outer_cleanups;
204 /* Chain of labels defined inside this binding contour.
205 For contours that have stack levels or cleanups. */
206 struct label_chain *label_chain;
207 /* Nonzero if this is associated with an EH region. */
208 int exception_region;
209 /* The saved target_temp_slot_level from our outer block.
210 We may reset target_temp_slot_level to be the level of
211 this block, if that is done, target_temp_slot_level
212 reverts to the saved target_temp_slot_level at the very
213 end of the block. */
214 int block_target_temp_slot_level;
215 /* True if we are currently emitting insns in an area of
216 output code that is controlled by a conditional
217 expression. This is used by the cleanup handling code to
218 generate conditional cleanup actions. */
219 int conditional_code;
220 /* A place to move the start of the exception region for any
221 of the conditional cleanups, must be at the end or after
222 the start of the last unconditional cleanup, and before any
223 conditional branch points. */
224 rtx last_unconditional_cleanup;
225 } GTY ((tag ("BLOCK_NESTING"))) block;
226 /* For switch (C) or case (Pascal) statements,
227 and also for dummies (see `expand_start_case_dummy'). */
228 struct nesting_case
230 /* The insn after which the case dispatch should finally
231 be emitted. Zero for a dummy. */
232 rtx start;
233 /* A list of case labels; it is first built as an AVL tree.
234 During expand_end_case, this is converted to a list, and may be
235 rearranged into a nearly balanced binary tree. */
236 struct case_node *case_list;
237 /* Label to jump to if no case matches. */
238 tree default_label;
239 /* The expression to be dispatched on. */
240 tree index_expr;
241 /* Type that INDEX_EXPR should be converted to. */
242 tree nominal_type;
243 /* Name of this kind of statement, for warnings. */
244 const char *printname;
245 /* Used to save no_line_numbers till we see the first case label.
246 We set this to -1 when we see the first case label in this
247 case statement. */
248 int line_number_status;
249 } GTY ((tag ("CASE_NESTING"))) case_stmt;
250 } GTY ((desc ("%1.desc"))) data;
253 /* Allocate and return a new `struct nesting'. */
255 #define ALLOC_NESTING() \
256 (struct nesting *) ggc_alloc (sizeof (struct nesting))
258 /* Pop the nesting stack element by element until we pop off
259 the element which is at the top of STACK.
260 Update all the other stacks, popping off elements from them
261 as we pop them from nesting_stack. */
/* Note: `this' is used as an ordinary identifier here, which is valid
   in C (it is a keyword only in C++).  */
263 #define POPSTACK(STACK) \
264 do { struct nesting *target = STACK; \
265 struct nesting *this; \
266 do { this = nesting_stack; \
267 if (loop_stack == this) \
268 loop_stack = loop_stack->next; \
269 if (cond_stack == this) \
270 cond_stack = cond_stack->next; \
271 if (block_stack == this) \
272 block_stack = block_stack->next; \
273 if (stack_block_stack == this) \
274 stack_block_stack = stack_block_stack->next; \
275 if (case_stack == this) \
276 case_stack = case_stack->next; \
277 nesting_depth = nesting_stack->depth - 1; \
278 nesting_stack = this->all; } \
279 while (this != target); } while (0)
281 /* In some cases it is impossible to generate code for a forward goto
282 until the label definition is seen. This happens when it may be necessary
283 for the goto to reset the stack pointer: we don't yet know how to do that.
284 So expand_goto puts an entry on this fixup list.
285 Each time a binding contour that resets the stack is exited,
286 we check each fixup.
287 If the target label has now been defined, we can insert the proper code. */
289 struct goto_fixup GTY(())
291 /* Points to following fixup. */
292 struct goto_fixup *next;
293 /* Points to the insn before the jump insn.
294 If more code must be inserted, it goes after this insn. */
295 rtx before_jump;
296 /* The LABEL_DECL that this jump is jumping to, or 0
297 for break, continue or return. */
298 tree target;
299 /* The BLOCK for the place where this goto was found. */
300 tree context;
301 /* The CODE_LABEL rtx that this is jumping to. */
302 rtx target_rtl;
303 /* Number of binding contours started in current function
304 before the label reference. */
305 int block_start_count;
306 /* The outermost stack level that should be restored for this jump.
307 Each time a binding contour that resets the stack is exited,
308 if the target label is *not* yet defined, this slot is updated. */
309 rtx stack_level;
310 /* List of lists of cleanup expressions to be run by this goto.
311 There is one element for each block that this goto is within.
312 The tail of this list can be 0,
313 if all remaining elements would be empty.
314 The TREE_VALUE contains the cleanup list of that block as of the
315 time this goto was seen.
316 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
317 tree cleanup_list_list;
320 /* Within any binding contour that must restore a stack level,
321 all labels are recorded with a chain of these structures. */
323 struct label_chain GTY(())
325 /* Points to the following label_chain entry. */
326 struct label_chain *next;
/* The LABEL_DECL recorded for this contour.  */
327 tree label;
330 struct stmt_status GTY(())
332 /* Chain of all pending binding contours. */
333 struct nesting * x_block_stack;
335 /* If any new stacks are added here, add them to POPSTACKS too. */
337 /* Chain of all pending binding contours that restore stack levels
338 or have cleanups. */
339 struct nesting * x_stack_block_stack;
341 /* Chain of all pending conditional statements. */
342 struct nesting * x_cond_stack;
344 /* Chain of all pending loops. */
345 struct nesting * x_loop_stack;
347 /* Chain of all pending case or switch statements. */
348 struct nesting * x_case_stack;
350 /* Separate chain including all of the above,
351 chained through the `all' field. */
352 struct nesting * x_nesting_stack;
354 /* Number of entries on nesting_stack now. */
355 int x_nesting_depth;
357 /* Number of binding contours started so far in this function. */
358 int x_block_start_count;
360 /* Each time we expand an expression-statement,
361 record the expr's type and its RTL value here. */
362 tree x_last_expr_type;
363 rtx x_last_expr_value;
365 /* Nonzero if within a ({...}) grouping, in which case we must
366 always compute a value for each expr-stmt in case it is the last one. */
367 int x_expr_stmts_for_value;
369 /* Location of last line-number note, whether we actually
370 emitted it or not. */
371 location_t x_emit_locus;
/* Chain of pending goto fixups for this function.  */
373 struct goto_fixup *x_goto_fixup_chain;
/* Accessor macros: the per-function statement state lives in cfun->stmt.  */
376 #define block_stack (cfun->stmt->x_block_stack)
377 #define stack_block_stack (cfun->stmt->x_stack_block_stack)
378 #define cond_stack (cfun->stmt->x_cond_stack)
379 #define loop_stack (cfun->stmt->x_loop_stack)
380 #define case_stack (cfun->stmt->x_case_stack)
381 #define nesting_stack (cfun->stmt->x_nesting_stack)
382 #define nesting_depth (cfun->stmt->x_nesting_depth)
383 #define current_block_start_count (cfun->stmt->x_block_start_count)
384 #define last_expr_type (cfun->stmt->x_last_expr_type)
385 #define last_expr_value (cfun->stmt->x_last_expr_value)
386 #define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
387 #define emit_locus (cfun->stmt->x_emit_locus)
388 #define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
390 /* Nonzero if we are using EH to handle cleanups. */
391 static int using_eh_for_cleanups_p = 0;
/* Forward declarations of subroutines local to this file.  */
393 static int n_occurrences (int, const char *);
394 static bool parse_input_constraint (const char **, int, int, int, int,
395 const char * const *, bool *, bool *);
396 static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
397 static void expand_goto_internal (tree, rtx, rtx);
398 static int expand_fixup (tree, rtx, rtx);
399 static rtx expand_nl_handler_label (rtx, rtx);
400 static void expand_nl_goto_receiver (void);
401 static void expand_nl_goto_receivers (struct nesting *);
402 static void fixup_gotos (struct nesting *, rtx, tree, rtx, int);
403 static bool check_operand_nalternatives (tree, tree);
404 static bool check_unique_operand_names (tree, tree);
405 static char *resolve_operand_name_1 (char *, tree, tree);
406 static void expand_null_return_1 (rtx);
407 static enum br_predictor return_prediction (rtx);
408 static void expand_value_return (rtx);
409 static int tail_recursion_args (tree, tree);
410 static void expand_cleanups (tree, int, int);
411 static void check_seenlabel (void);
412 static void do_jump_if_equal (rtx, rtx, rtx, int);
413 static int estimate_case_costs (case_node_ptr);
414 static bool same_case_target_p (rtx, rtx);
415 static void strip_default_case_nodes (case_node_ptr *, rtx);
416 static bool lshift_cheap_p (void);
417 static int case_bit_test_cmp (const void *, const void *);
418 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
419 static void group_case_nodes (case_node_ptr);
420 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
421 static int node_has_low_bound (case_node_ptr, tree);
422 static int node_has_high_bound (case_node_ptr, tree);
423 static int node_is_bounded (case_node_ptr, tree);
424 static void emit_jump_if_reachable (rtx);
425 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
426 static struct case_node *case_tree2list (case_node *, case_node *);
/* Record that cleanups are to be handled through the exception-handling
   machinery (sets the file-local flag using_eh_for_cleanups_p).  */
428 void
429 using_eh_for_cleanups (void)
431 using_eh_for_cleanups_p = 1;
/* Allocate and initialize cfun->stmt, the per-function statement state:
   empty nesting stacks, no pending goto fixups, no last expression.  */
434 void
435 init_stmt_for_function (void)
437 cfun->stmt = ((struct stmt_status *)ggc_alloc (sizeof (struct stmt_status)));
439 /* We are not currently within any block, conditional, loop or case. */
440 block_stack = 0;
441 stack_block_stack = 0;
442 loop_stack = 0;
443 case_stack = 0;
444 cond_stack = 0;
445 nesting_stack = 0;
446 nesting_depth = 0;
448 current_block_start_count = 0;
450 /* No gotos have been expanded yet. */
451 goto_fixup_chain = 0;
453 /* We are not processing a ({...}) grouping. */
454 expr_stmts_for_value = 0;
455 clear_last_expr ();
458 /* Record the current file and line. Called from emit_line_note.
   LOCATION becomes the new emit_locus. */
460 void
461 set_file_and_line_for_stmt (location_t location)
463 /* If we're outputting an inline function, and we add a line note,
464 there may be no CFUN->STMT information. So, there's no need to
465 update it. */
466 if (cfun->stmt)
467 emit_locus = location;
470 /* Emit a no-op instruction. */
472 void
473 emit_nop (void)
475 rtx last_insn;
477 last_insn = get_last_insn ();
/* A real nop is emitted only when not optimizing and the last insn is a
   code label, or a note with no real insn before it — presumably so
   there is an actual insn at this point; NOTE(review): confirm intent.  */
478 if (!optimize
479 && (GET_CODE (last_insn) == CODE_LABEL
480 || (GET_CODE (last_insn) == NOTE
481 && prev_real_insn (last_insn) == 0)))
482 emit_insn (gen_nop ());
485 /* Return the rtx-label that corresponds to a LABEL_DECL,
486 creating it if necessary. */
489 label_rtx (tree label)
/* Aborts if LABEL is not a LABEL_DECL.  */
491 if (TREE_CODE (label) != LABEL_DECL)
492 abort ();
/* Lazily create the CODE_LABEL the first time it is requested.  */
494 if (!DECL_RTL_SET_P (label))
495 SET_DECL_RTL (label, gen_label_rtx ());
497 return DECL_RTL (label);
500 /* As above, but also put it on the forced-reference list of the
501 function that contains it. */
503 force_label_rtx (tree label)
505 rtx ref = label_rtx (label);
506 tree function = decl_function_context (label);
507 struct function *p;
509 if (!function)
510 abort ();
/* LABEL may belong to an enclosing function; record the reference in
   whichever function actually contains the label.  */
512 if (function != current_function_decl
513 && function != inline_function_decl)
514 p = find_function_data (function);
515 else
516 p = cfun;
518 p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
519 p->expr->x_forced_labels);
520 return ref;
523 /* Add an unconditional jump to LABEL as the next sequential instruction. */
525 void
526 emit_jump (rtx label)
/* Flush any pending stack adjustment first so the jump does not skip
   it; the barrier records that control does not fall through.  */
528 do_pending_stack_adjust ();
529 emit_jump_insn (gen_jump (label));
530 emit_barrier ();
533 /* Emit code to jump to the address
534 specified by the pointer expression EXP. */
536 void
537 expand_computed_goto (tree exp)
539 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
541 #ifdef POINTERS_EXTEND_UNSIGNED
/* Convert the address to the mode of a pointer if necessary.  */
542 if (GET_MODE (x) != Pmode)
543 x = convert_memory_address (Pmode, x);
544 #endif
546 emit_queue ();
/* All computed gotos in a function share a single indirect-jump site:
   the first one creates a common register and label and emits the
   indirect jump there; subsequent ones just store the new address into
   the common register and jump to the common label.  */
548 if (! cfun->computed_goto_common_label)
550 cfun->computed_goto_common_reg = copy_to_mode_reg (Pmode, x);
551 cfun->computed_goto_common_label = gen_label_rtx ();
552 emit_label (cfun->computed_goto_common_label);
554 do_pending_stack_adjust ();
555 emit_indirect_jump (cfun->computed_goto_common_reg);
557 current_function_has_computed_jump = 1;
559 else
561 emit_move_insn (cfun->computed_goto_common_reg, x);
562 emit_jump (cfun->computed_goto_common_label);
566 /* Handle goto statements and the labels that they can go to. */
568 /* Specify the location in the RTL code of a label LABEL,
569 which is a LABEL_DECL tree node.
571 This is used for the kind of label that the user can jump to with a
572 goto statement, and for alternatives of a switch or case statement.
573 RTL labels generated for loops and conditionals don't go through here;
574 they are generated directly at the RTL level, by other functions below.
576 Note that this has nothing to do with defining label *names*.
577 Languages vary in how they do that and what that even means. */
579 void
580 expand_label (tree label)
582 struct label_chain *p;
584 do_pending_stack_adjust ();
585 emit_label (label_rtx (label));
586 if (DECL_NAME (label))
587 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
/* Inside a contour that restores a stack level or has cleanups,
   record this label on that contour's label chain.  */
589 if (stack_block_stack != 0)
591 p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
592 p->next = stack_block_stack->data.block.label_chain;
593 stack_block_stack->data.block.label_chain = p;
594 p->label = label;
598 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
599 from nested functions. */
601 void
602 declare_nonlocal_label (tree label)
/* A pointer-sized stack slot holds the handler address for this label.  */
604 rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
606 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
607 LABEL_PRESERVE_P (label_rtx (label)) = 1;
/* On the first nonlocal label, save the stack level for nonlocal gotos.  */
608 if (nonlocal_goto_handler_slots == 0)
610 emit_stack_save (SAVE_NONLOCAL,
611 &nonlocal_goto_stack_level,
612 PREV_INSN (tail_recursion_reentry));
/* Chain the new slot onto the list, parallel with nonlocal_labels.  */
614 nonlocal_goto_handler_slots
615 = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
618 /* Generate RTL code for a `goto' statement with target label LABEL.
619 LABEL should be a LABEL_DECL tree node that was or will later be
620 defined with `expand_label'. */
622 void
623 expand_goto (tree label)
625 tree context;
627 /* Check for a nonlocal goto to a containing function. */
628 context = decl_function_context (label);
629 if (context != 0 && context != current_function_decl)
631 struct function *p = find_function_data (context);
632 rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
633 rtx handler_slot, static_chain, save_area, insn;
634 tree link;
636 /* Find the corresponding handler slot for this label. */
637 handler_slot = p->x_nonlocal_goto_handler_slots;
/* The slot list and the label list run in parallel; walk them in step.  */
638 for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
639 link = TREE_CHAIN (link))
640 handler_slot = XEXP (handler_slot, 1);
641 handler_slot = XEXP (handler_slot, 0);
643 p->has_nonlocal_label = 1;
644 current_function_has_nonlocal_goto = 1;
645 LABEL_REF_NONLOCAL_P (label_ref) = 1;
647 /* Copy the rtl for the slots so that they won't be shared in
648 case the virtual stack vars register gets instantiated differently
649 in the parent than in the child. */
651 static_chain = copy_to_reg (lookup_static_chain (label));
653 /* Get addr of containing function's current nonlocal goto handler,
654 which will do any cleanups and then jump to the label. */
655 handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
656 virtual_stack_vars_rtx,
657 static_chain));
659 /* Get addr of containing function's nonlocal save area. */
660 save_area = p->x_nonlocal_goto_stack_level;
661 if (save_area)
662 save_area = replace_rtx (copy_rtx (save_area),
663 virtual_stack_vars_rtx, static_chain);
/* Use the target's nonlocal_goto insn pattern when available;
   otherwise fall back to the generic restore-and-indirect-jump code.  */
665 #if HAVE_nonlocal_goto
666 if (HAVE_nonlocal_goto)
667 emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
668 save_area, label_ref));
669 else
670 #endif
672 /* Restore frame pointer for containing function.
673 This sets the actual hard register used for the frame pointer
674 to the location of the function's incoming static chain info.
675 The non-local goto handler will then adjust it to contain the
676 proper value and reload the argument pointer, if needed. */
677 emit_move_insn (hard_frame_pointer_rtx, static_chain);
678 emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);
680 /* USE of hard_frame_pointer_rtx added for consistency;
681 not clear if really needed. */
682 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
683 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
684 emit_indirect_jump (handler_slot);
687 /* Search backwards to the jump insn and mark it as a
688 non-local goto. */
689 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
691 if (GET_CODE (insn) == JUMP_INSN)
693 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
694 const0_rtx, REG_NOTES (insn));
695 break;
697 else if (GET_CODE (insn) == CALL_INSN)
698 break;
/* Ordinary goto within the current function.  */
701 else
702 expand_goto_internal (label, label_rtx (label), NULL_RTX);
705 /* Generate RTL code for a `goto' statement with target label BODY.
706 LABEL should be a LABEL_REF.
707 LAST_INSN, if non-0, is the rtx we should consider as the last
708 insn emitted (for the purposes of cleaning up a return). */
710 static void
711 expand_goto_internal (tree body, rtx label, rtx last_insn)
713 struct nesting *block;
714 rtx stack_level = 0;
716 if (GET_CODE (label) != CODE_LABEL)
717 abort ();
719 /* If label has already been defined, we can tell now
720 whether and how we must alter the stack level. */
722 if (PREV_INSN (label) != 0)
724 /* Find the innermost pending block that contains the label.
725 (Check containment by comparing insn-uids.)
726 Then restore the outermost stack level within that block,
727 and do cleanups of all blocks contained in it. */
728 for (block = block_stack; block; block = block->next)
730 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
731 break;
732 if (block->data.block.stack_level != 0)
733 stack_level = block->data.block.stack_level;
734 /* Execute the cleanups for blocks we are exiting. */
735 if (block->data.block.cleanups != 0)
737 expand_cleanups (block->data.block.cleanups, 1, 1);
738 do_pending_stack_adjust ();
742 if (stack_level)
744 /* Ensure stack adjust isn't done by emit_jump, as this
745 would clobber the stack pointer. This one should be
746 deleted as dead by flow. */
747 clear_pending_stack_adjust ();
748 do_pending_stack_adjust ();
750 /* Don't do this adjust if it's to the end label and this function
751 is to return with a depressed stack pointer. */
752 if (label == return_label
753 && (((TREE_CODE (TREE_TYPE (current_function_decl))
754 == FUNCTION_TYPE)
755 && (TYPE_RETURNS_STACK_DEPRESSED
756 (TREE_TYPE (current_function_decl))))))
758 else
759 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
/* DECL_TOO_LATE means the label's contour has been exited already,
   so jumping to it would enter a binding contour invalidly.  */
762 if (body != 0 && DECL_TOO_LATE (body))
763 error ("jump to `%s' invalidly jumps into binding contour",
764 IDENTIFIER_POINTER (DECL_NAME (body)));
766 /* Label not yet defined: may need to put this goto
767 on the fixup list. */
768 else if (! expand_fixup (body, label, last_insn))
770 /* No fixup needed. Record that the label is the target
771 of at least one goto that has no fixup. */
772 if (body != 0)
773 TREE_ADDRESSABLE (body) = 1;
776 emit_jump (label);
779 /* Generate if necessary a fixup for a goto
780 whose target label in tree structure (if any) is TREE_LABEL
781 and whose target in rtl is RTL_LABEL.
783 If LAST_INSN is nonzero, we pretend that the jump appears
784 after insn LAST_INSN instead of at the current point in the insn stream.
786 The fixup will be used later to insert insns just before the goto.
787 Those insns will restore the stack level as appropriate for the
788 target label, and will (in the case of C++) also invoke any object
789 destructors which have to be invoked when we exit the scopes which
790 are exited by the goto.
792 Value is nonzero if a fixup is made. */
794 static int
795 expand_fixup (tree tree_label, rtx rtl_label, rtx last_insn)
797 struct nesting *block, *end_block;
799 /* See if we can recognize which block the label will be output in.
800 This is possible in some very common cases.
801 If we succeed, set END_BLOCK to that block.
802 Otherwise, set it to 0. */
804 if (cond_stack
805 && (rtl_label == cond_stack->data.cond.endif_label
806 || rtl_label == cond_stack->data.cond.next_label))
807 end_block = cond_stack;
808 /* If we are in a loop, recognize certain labels which
809 are likely targets. This reduces the number of fixups
810 we need to create. */
811 else if (loop_stack
812 && (rtl_label == loop_stack->data.loop.start_label
813 || rtl_label == loop_stack->data.loop.end_label
814 || rtl_label == loop_stack->data.loop.continue_label))
815 end_block = loop_stack;
816 else
817 end_block = 0;
819 /* Now set END_BLOCK to the binding level to which we will return. */
821 if (end_block)
823 struct nesting *next_block = end_block->all;
824 block = block_stack;
826 /* First see if the END_BLOCK is inside the innermost binding level.
827 If so, then no cleanups or stack levels are relevant. */
828 while (next_block && next_block != block)
829 next_block = next_block->all;
831 if (next_block)
832 return 0;
834 /* Otherwise, set END_BLOCK to the innermost binding level
835 which is outside the relevant control-structure nesting. */
836 next_block = block_stack->next;
837 for (block = block_stack; block != end_block; block = block->all)
838 if (block == next_block)
839 next_block = next_block->next;
840 end_block = next_block;
843 /* Does any containing block have a stack level or cleanups?
844 If not, no fixup is needed, and that is the normal case
845 (the only case, for standard C). */
846 for (block = block_stack; block != end_block; block = block->next)
847 if (block->data.block.stack_level != 0
848 || block->data.block.cleanups != 0)
849 break;
851 if (block != end_block)
853 /* Ok, a fixup is needed. Add a fixup to the list of such. */
854 struct goto_fixup *fixup
855 = (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
856 /* In case an old stack level is restored, make sure that comes
857 after any pending stack adjust. */
858 /* ?? If the fixup isn't to come at the present position,
859 doing the stack adjust here isn't useful. Doing it with our
860 settings at that location isn't useful either. Let's hope
861 someone does it! */
862 if (last_insn == 0)
863 do_pending_stack_adjust ();
864 fixup->target = tree_label;
865 fixup->target_rtl = rtl_label;
867 /* Create a BLOCK node and a corresponding matched set of
868 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
869 this point. The notes will encapsulate any and all fixup
870 code which we might later insert at this point in the insn
871 stream. Also, the BLOCK node will be the parent (i.e. the
872 `SUPERBLOCK') of any other BLOCK nodes which we might create
873 later on when we are expanding the fixup code.
875 Note that optimization passes (including expand_end_loop)
876 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
877 as a placeholder. */
880 rtx original_before_jump
881 = last_insn ? last_insn : get_last_insn ();
882 rtx start;
883 rtx end;
884 tree block;
886 block = make_node (BLOCK);
887 TREE_USED (block) = 1;
889 if (!cfun->x_whole_function_mode_p)
890 (*lang_hooks.decls.insert_block) (block);
891 else
893 BLOCK_CHAIN (block)
894 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
895 BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
896 = block;
/* Emit the BLOCK_BEG / placeholder / BLOCK_END notes in a separate
   sequence, then splice them in after the jump's position.  */
899 start_sequence ();
900 start = emit_note (NOTE_INSN_BLOCK_BEG);
901 if (cfun->x_whole_function_mode_p)
902 NOTE_BLOCK (start) = block;
903 fixup->before_jump = emit_note (NOTE_INSN_DELETED);
904 end = emit_note (NOTE_INSN_BLOCK_END);
905 if (cfun->x_whole_function_mode_p)
906 NOTE_BLOCK (end) = block;
907 fixup->context = block;
908 end_sequence ();
909 emit_insn_after (start, original_before_jump);
912 fixup->block_start_count = current_block_start_count;
913 fixup->stack_level = 0;
914 fixup->cleanup_list_list
915 = ((block->data.block.outer_cleanups
916 || block->data.block.cleanups)
917 ? tree_cons (NULL_TREE, block->data.block.cleanups,
918 block->data.block.outer_cleanups)
919 : 0);
920 fixup->next = goto_fixup_chain;
921 goto_fixup_chain = fixup;
/* Nonzero iff a fixup was actually recorded.  */
924 return block != 0;
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.

   This simply runs fixup_gotos with no enclosing block (NULL THISBLOCK),
   no stack level to restore, and no cleanup list, finalizing every
   remaining fixup on goto_fixup_chain now that the whole function body
   has been expanded.  */

void
expand_fixups (rtx first_insn)
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN positive means report error if there is a jump into this
   contour from before the beginning of the contour.  This is also done if
   STACK_LEVEL is nonzero unless DONT_JUMP_IN is negative.  */

static void
fixup_gotos (struct nesting *thisblock, rtx stack_level,
	     tree cleanup_list, rtx first_insn, int dont_jump_in)
{
  struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  /* Pass 1: finalize any fixup whose target label has now been seen.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  rtx cleanup_insns;

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.   This code used to use
	     the first non-label insn after f->target_rtl, but that's
	     wrong since such can be added, by things like put_var_into_stack
	     and have INSN_UIDs that are out of the range of the block.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in > 0 || (dont_jump_in == 0 && stack_level)
		  || cleanup_list)
	      && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  (*lang_hooks.decls.pushlevel) (0);
	  (*lang_hooks.decls.set_block) (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  Skipped for a return from a function whose
	     type is marked as returning with a depressed stack pointer.  */
	  if (f->stack_level
	      && ! (f->target_rtl == return_label
		    && ((TREE_CODE (TREE_TYPE (current_function_decl))
			 == FUNCTION_TYPE)
			&& (TYPE_RETURNS_STACK_DEPRESSED
			    (TREE_TYPE (current_function_decl))))))
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  (*lang_hooks.decls.poplevel) (1, 0, 0);

	  end_sequence ();
	  emit_insn_after (cleanup_insns, f->before_jump);

	  /* Mark this fixup as handled.  */
	  f->before_jump = 0;
	}
    }

  /* Pass 2: for any still-undefined labels, do the cleanups for this block
     now.  We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      (*lang_hooks.decls.pushlevel) (0);
	      (*lang_hooks.decls.set_block) (f->context);
	      expand_cleanups (TREE_VALUE (lists), 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      (*lang_hooks.decls.poplevel) (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insn_after (cleanup_insns, f->before_jump);

	      /* This block's cleanups are now done; drop them from the
		 fixup's pending list.  */
	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
/* Count how many times the character C appears in the
   NUL-terminated string S.  */

static int
n_occurrences (int c, const char *s)
{
  int count = 0;
  const char *p;

  for (p = s; *p != '\0'; p++)
    if (*p == c)
      count++;

  return count;
}
1110 /* Generate RTL for an asm statement (explicit assembler code).
1111 STRING is a STRING_CST node containing the assembler code text,
1112 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
1113 insn is volatile; don't optimize it. */
1115 void
1116 expand_asm (tree string, int vol)
1118 rtx body;
1120 if (TREE_CODE (string) == ADDR_EXPR)
1121 string = TREE_OPERAND (string, 0);
1123 body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));
1125 MEM_VOLATILE_P (body) = vol;
1127 emit_insn (body);
1129 clear_last_expr ();
1132 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
1133 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
1134 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
1135 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
1136 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
1137 constraint allows the use of a register operand. And, *IS_INOUT
1138 will be true if the operand is read-write, i.e., if it is used as
1139 an input as well as an output. If *CONSTRAINT_P is not in
1140 canonical form, it will be made canonical. (Note that `+' will be
1141 replaced with `=' as part of this process.)
1143 Returns TRUE if all went well; FALSE if an error occurred. */
1145 bool
1146 parse_output_constraint (const char **constraint_p, int operand_num,
1147 int ninputs, int noutputs, bool *allows_mem,
1148 bool *allows_reg, bool *is_inout)
1150 const char *constraint = *constraint_p;
1151 const char *p;
1153 /* Assume the constraint doesn't allow the use of either a register
1154 or memory. */
1155 *allows_mem = false;
1156 *allows_reg = false;
1158 /* Allow the `=' or `+' to not be at the beginning of the string,
1159 since it wasn't explicitly documented that way, and there is a
1160 large body of code that puts it last. Swap the character to
1161 the front, so as not to uglify any place else. */
1162 p = strchr (constraint, '=');
1163 if (!p)
1164 p = strchr (constraint, '+');
1166 /* If the string doesn't contain an `=', issue an error
1167 message. */
1168 if (!p)
1170 error ("output operand constraint lacks `='");
1171 return false;
1174 /* If the constraint begins with `+', then the operand is both read
1175 from and written to. */
1176 *is_inout = (*p == '+');
1178 /* Canonicalize the output constraint so that it begins with `='. */
1179 if (p != constraint || is_inout)
1181 char *buf;
1182 size_t c_len = strlen (constraint);
1184 if (p != constraint)
1185 warning ("output constraint `%c' for operand %d is not at the beginning",
1186 *p, operand_num);
1188 /* Make a copy of the constraint. */
1189 buf = alloca (c_len + 1);
1190 strcpy (buf, constraint);
1191 /* Swap the first character and the `=' or `+'. */
1192 buf[p - constraint] = buf[0];
1193 /* Make sure the first character is an `='. (Until we do this,
1194 it might be a `+'.) */
1195 buf[0] = '=';
1196 /* Replace the constraint with the canonicalized string. */
1197 *constraint_p = ggc_alloc_string (buf, c_len);
1198 constraint = *constraint_p;
1201 /* Loop through the constraint string. */
1202 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
1203 switch (*p)
1205 case '+':
1206 case '=':
1207 error ("operand constraint contains incorrectly positioned '+' or '='");
1208 return false;
1210 case '%':
1211 if (operand_num + 1 == ninputs + noutputs)
1213 error ("`%%' constraint used with last operand");
1214 return false;
1216 break;
1218 case 'V': case 'm': case 'o':
1219 *allows_mem = true;
1220 break;
1222 case '?': case '!': case '*': case '&': case '#':
1223 case 'E': case 'F': case 'G': case 'H':
1224 case 's': case 'i': case 'n':
1225 case 'I': case 'J': case 'K': case 'L': case 'M':
1226 case 'N': case 'O': case 'P': case ',':
1227 break;
1229 case '0': case '1': case '2': case '3': case '4':
1230 case '5': case '6': case '7': case '8': case '9':
1231 case '[':
1232 error ("matching constraint not valid in output operand");
1233 return false;
1235 case '<': case '>':
1236 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1237 excepting those that expand_call created. So match memory
1238 and hope. */
1239 *allows_mem = true;
1240 break;
1242 case 'g': case 'X':
1243 *allows_reg = true;
1244 *allows_mem = true;
1245 break;
1247 case 'p': case 'r':
1248 *allows_reg = true;
1249 break;
1251 default:
1252 if (!ISALPHA (*p))
1253 break;
1254 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
1255 *allows_reg = true;
1256 #ifdef EXTRA_CONSTRAINT_STR
1257 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
1258 *allows_reg = true;
1259 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
1260 *allows_mem = true;
1261 else
1263 /* Otherwise we can't assume anything about the nature of
1264 the constraint except that it isn't purely registers.
1265 Treat it like "g" and hope for the best. */
1266 *allows_reg = true;
1267 *allows_mem = true;
1269 #endif
1270 break;
1273 return true;
/* Similar, but for input constraints.  Parse *CONSTRAINT_P, the
   INPUT_NUMth input (zero-based) of an asm with NINPUTS inputs,
   NOUTPUTS outputs and NINOUT read-write operands.  CONSTRAINTS is the
   full array of constraint strings (outputs first), used to resolve
   matching ("0".."9") constraints.  Sets *ALLOWS_MEM / *ALLOWS_REG as
   for parse_output_constraint.  Returns TRUE on success.  */

static bool
parse_input_constraint (const char **constraint_p, int input_num,
			int ninputs, int noutputs, int ninout,
			const char * const * constraints,
			bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	/* Only diagnose these in the user's own constraint; they are
	   legitimate when we have switched to a matched output
	   constraint (CONSTRAINT != ORIG_CONSTRAINT below).  */
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains `%c'", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      /* Restart the scan on the matched output's constraint.  */
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      /* ??? At the end of the loop, we will skip the first part of
		 the matched constraint.  This assumes not only that the
		 other constraint is an output constraint, but also that
		 the '=' or '+' come first.  */
	      break;
	    }
	  else
	    /* Skip past the digits of the number just parsed.  */
	    j = end - constraint;
	  /* Anticipate increment at end of loop.  */
	  j--;
	}
	/* Fall through.  */

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation `%c' in constraint", constraint[j]);
	    return false;
	  }
	if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
	    != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}
1406 /* Check for overlap between registers marked in CLOBBERED_REGS and
1407 anything inappropriate in DECL. Emit error and return TRUE for error,
1408 FALSE for ok. */
1410 static bool
1411 decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
1413 /* Conflicts between asm-declared register variables and the clobber
1414 list are not allowed. */
1415 if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
1416 && DECL_REGISTER (decl)
1417 && REG_P (DECL_RTL (decl))
1418 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
1420 rtx reg = DECL_RTL (decl);
1421 unsigned int regno;
1423 for (regno = REGNO (reg);
1424 regno < (REGNO (reg)
1425 + HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)));
1426 regno++)
1427 if (TEST_HARD_REG_BIT (clobbered_regs, regno))
1429 error ("asm-specifier for variable `%s' conflicts with asm clobber list",
1430 IDENTIFIER_POINTER (DECL_NAME (decl)));
1432 /* Reset registerness to stop multiple errors emitted for a
1433 single variable. */
1434 DECL_REGISTER (decl) = 0;
1435 return true;
1438 return false;
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   and a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

void
expand_asm_operands (tree string, tree outputs, tree inputs,
		     tree clobbers, int vol, const char *filename, int line)
{
  rtvec argvec, constraintvec;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout;
  int nclobbers;
  HARD_REG_SET clobbered_regs;
  int clobber_conflict_found = 0;
  tree tail;
  tree t;
  int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  const char **constraints
    = (const char **) alloca ((noutputs + ninputs) * sizeof (const char *));
  int old_generating_concat_p = generating_concat_p;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (! check_operand_nalternatives (outputs, inputs))
    return;

  if (! check_unique_operand_names (outputs, inputs))
    return;

  /* Replace [<name>] references in the template and constraints by
     operand numbers.  */
  string = resolve_asm_operand_names (string, outputs, inputs);

  /* Collect constraints: outputs first, then inputs.  */
  i = 0;
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

#ifdef MD_ASM_CLOBBERS
  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  MD_ASM_CLOBBERS (clobbers);
#endif

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  CLEAR_HARD_REG_SET (clobbered_regs);
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      /* decode_reg_name: >= 0 is a hard regno, -2 unknown name,
	 -3 "cc", -4 "memory".  */
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);

      /* Mark clobbered registers.  */
      if (i >= 0)
	{
	  /* Clobbering the PIC register is an error */
	  if (i == (int) PIC_OFFSET_TABLE_REGNUM)
	    {
	      error ("PIC register `%s' clobbered in `asm'", regname);
	      return;
	    }

	  SET_HARD_REG_BIT (clobbered_regs, i);
	}
    }

  clear_last_expr ();

  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* If there's an erroneous arg, emit no insn.  */
      if (type == error_mark_node)
	return;

      /* Try to parse the output constraint.  If that fails, there's
	 no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
				    &allows_mem, &allows_reg, &is_inout))
	return;

      /* Operands that can't live in a register must be addressable.  */
      if (! allows_reg
	  && (allows_mem
	      || is_inout
	      || (DECL_P (val)
		  && GET_CODE (DECL_RTL (val)) == REG
		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
	(*lang_hooks.mark_addressable) (val);

      if (is_inout)
	ninout++;
    }

  /* Each read-write operand also counts as an input.  */
  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
	 would get VOIDmode and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				    constraints, &allows_mem, &allows_reg))
	return;

      if (! allows_reg && allows_mem)
	(*lang_hooks.mark_addressable) (TREE_VALUE (tail));
    }

  /* Second pass evaluates arguments.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      bool is_inout;
      bool allows_reg;
      bool allows_mem;
      rtx op;

      /* The constraints were validated in the first pass; a failure
	 now would be a compiler bug.  */
      if (!parse_output_constraint (&constraints[i], i, ninputs,
				    noutputs, &allows_mem, &allows_reg,
				    &is_inout))
	abort ();

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (DECL_P (val)
	      && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
	  if (GET_CODE (op) == MEM)
	    op = validize_mem (op);

	  if (! allows_reg && GET_CODE (op) != MEM)
	    error ("output number %d not directly addressable", i);
	  if ((! allows_mem && GET_CODE (op) == MEM)
	      || GET_CODE (op) == CONCAT)
	    {
	      /* Write into a scratch register instead; remember where
		 the result really belongs so it can be copied back.  */
	      real_output_rtx[i] = protect_from_queue (op, 1);
	      op = gen_reg_rtx (GET_MODE (op));
	      if (is_inout)
		emit_move_insn (op, real_output_rtx[i]);
	    }
	}
      else
	{
	  /* Let the asm write into a temporary; the caller copies it to
	     the original lvalue via the rewritten TREE_VALUE.  */
	  op = assign_temp (type, 0, 0, 1);
	  op = validize_mem (op);
	  TREE_VALUE (tail) = make_tree (type, op);
	}
      output_rtx[i] = op;

      generating_concat_p = old_generating_concat_p;

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (type);
	  inout_opnum[ninout++] = i;
	}

      if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
	clobber_conflict_found = 1;
    }

  /* Make vectors for the expression-rtx, constraint strings,
     and named operands.  */

  argvec = rtvec_alloc (ninputs);
  constraintvec = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
				: GET_MODE (output_rtx[0])),
			       TREE_STRING_POINTER (string),
			       empty_string, 0, argvec, constraintvec,
			       filename, line);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
    {
      bool allows_reg, allows_mem;
      const char *constraint;
      tree val, type;
      rtx op;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				    constraints, &allows_mem, &allows_reg))
	abort ();

      generating_concat_p = 0;

      val = TREE_VALUE (tail);
      type = TREE_TYPE (val);
      op = expand_expr (val, NULL_RTX, VOIDmode,
			(allows_mem && !allows_reg
			 ? EXPAND_MEMORY : EXPAND_NORMAL));

      /* Never pass a CONCAT to an ASM.  */
      if (GET_CODE (op) == CONCAT)
	op = force_reg (GET_MODE (op), op);
      else if (GET_CODE (op) == MEM)
	op = validize_mem (op);

      /* If the operand doesn't already satisfy its constraint, try to
	 massage it into a form that does.  */
      if (asm_operand_ok (op, constraint) <= 0)
	{
	  if (allows_reg)
	    op = force_reg (TYPE_MODE (type), op);
	  else if (!allows_mem)
	    warning ("asm operand %d probably doesn't match constraints",
		     i + noutputs);
	  else if (GET_CODE (op) == MEM)
	    {
	      /* We won't recognize either volatile memory or memory
		 with a queued address as available a memory_operand
		 at this point.  Ignore it: clearly this *is* a memory.  */
	    }
	  else
	    {
	      warning ("use of memory input without lvalue in "
		       "asm operand %d is deprecated", i + noutputs);

	      if (CONSTANT_P (op))
		{
		  op = force_const_mem (TYPE_MODE (type), op);
		  op = validize_mem (op);
		}
	      else if (GET_CODE (op) == REG
		       || GET_CODE (op) == SUBREG
		       || GET_CODE (op) == ADDRESSOF
		       || GET_CODE (op) == CONCAT)
		{
		  /* Spill the value into a const-qualified stack
		     temporary so it can be passed as memory.  */
		  tree qual_type = build_qualified_type (type,
							 (TYPE_QUALS (type)
							  | TYPE_QUAL_CONST));
		  rtx memloc = assign_temp (qual_type, 1, 1, 1);
		  memloc = validize_mem (memloc);
		  emit_move_insn (memloc, op);
		  op = memloc;
		}
	    }
	}

      generating_concat_p = old_generating_concat_p;
      ASM_OPERANDS_INPUT (body, i) = op;

      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
	= gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);

      if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
	clobber_conflict_found = 1;
    }

  /* Protect all the operands from the queue now that they have all been
     evaluated.  */

  generating_concat_p = 0;

  for (i = 0; i < ninputs - ninout; i++)
    ASM_OPERANDS_INPUT (body, i)
      = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  The in-out
     inputs occupy the last NINOUT slots of ARGVEC, each with a
     matching-constraint string naming its output operand.  */
  for (i = 0; i < ninout; i++)
    {
      int j = inout_opnum[i];
      char buffer[16];

      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
	= output_rtx[j];

      sprintf (buffer, "%d", j);
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
	= gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
    }

  generating_concat_p = old_generating_concat_p;

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
		  ARGVEC CONSTRAINTS OPNAMES))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
      emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }

  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      emit_insn (body);
    }

  else
    {
      rtx obody = body;
      int num = noutputs;

      if (num == 0)
	num = 1;

      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */
      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx_SET (VOIDmode,
			   output_rtx[i],
			   gen_rtx_ASM_OPERANDS
			   (GET_MODE (output_rtx[i]),
			    TREE_STRING_POINTER (string),
			    constraints[i], i, argvec, constraintvec,
			    filename, line));

	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);
	  rtx clobbered_reg;

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx_CLOBBER (VOIDmode,
				       gen_rtx_MEM
				       (BLKmode,
					gen_rtx_SCRATCH (VOIDmode)));
		  continue;
		}

	      /* Ignore unknown register, error already signaled.  */
	      continue;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  clobbered_reg = gen_rtx_REG (QImode, j);

	  /* Do sanity check for overlap between clobbers and respectively
	     input and outputs that hasn't been handled.  Such overlap
	     should have been detected and reported above.  */
	  if (!clobber_conflict_found)
	    {
	      int opno;

	      /* We test the old body (obody) contents to avoid tripping
		 over the under-construction body.  */
	      for (opno = 0; opno < noutputs; opno++)
		if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
		  internal_error ("asm clobber conflict with output operand");

	      for (opno = 0; opno < ninputs - ninout; opno++)
		if (reg_overlap_mentioned_p (clobbered_reg,
					     ASM_OPERANDS_INPUT (obody, opno)))
		  internal_error ("asm clobber conflict with input operand");
	    }

	  XVECEXP (body, 0, i++)
	    = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
	}

      emit_insn (body);
    }

  /* For any outputs that needed reloading into registers, spill them
     back to where they belong.  */
  for (i = 0; i < noutputs; ++i)
    if (real_output_rtx[i])
      emit_move_insn (real_output_rtx[i], output_rtx[i]);

  free_temp_slots ();
}
/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (tree outputs, tree inputs)
{
  if (outputs || inputs)
    {
      /* The expected count comes from the first operand's constraint:
	 one alternative per comma-separated field.  */
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives
	= n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in `asm'");
	  return false;
	}

      /* Walk the outputs, then chain on to the inputs when the output
	 list runs out.  NOTE(review): when OUTPUTS is empty this loop
	 body never runs, so the inputs are not cross-checked against
	 each other -- presumably benign, but confirm before relying
	 on this function for input-only asms.  */
      tmp = outputs;
      while (tmp)
	{
	  const char *constraint
	    = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));

	  if (n_occurrences (',', constraint) != nalternatives)
	    {
	      error ("operand constraints for `asm' differ in number of alternatives");
	      return false;
	    }

	  if (TREE_CHAIN (tmp))
	    tmp = TREE_CHAIN (tmp);
	  else
	    tmp = next, next = 0;
	}
    }

  return true;
}
1945 /* A subroutine of expand_asm_operands. Check that all operand names
1946 are unique. Return true if so. We rely on the fact that these names
1947 are identifiers, and so have been canonicalized by get_identifier,
1948 so all we need are pointer comparisons. */
1950 static bool
1951 check_unique_operand_names (tree outputs, tree inputs)
1953 tree i, j;
/* NOTE(review): the head comment says pointer comparisons suffice,
   but the code actually uses simple_cst_equal on the name nodes —
   confirm which of the two reflects the current representation. */
/* Each output name is compared against all later output names;
   unnamed operands (null TREE_PURPOSE) are skipped. */
1955 for (i = outputs; i ; i = TREE_CHAIN (i))
1957 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1958 if (! i_name)
1959 continue;
1961 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1962 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1963 goto failure;
/* Each input name is compared against later inputs and against
   every output, so collisions across the two lists are caught. */
1966 for (i = inputs; i ; i = TREE_CHAIN (i))
1968 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1969 if (! i_name)
1970 continue;
1972 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1973 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1974 goto failure;
1975 for (j = outputs; j ; j = TREE_CHAIN (j))
1976 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1977 goto failure;
1980 return true;
1982 failure:
/* I still points at the list node holding the duplicated name. */
1983 error ("duplicate asm operand name '%s'",
1984 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1985 return false;
1988 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1989 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1990 STRING and in the constraints to those numbers. Returns the (possibly
   rebuilt) template STRING. */
1992 tree
1993 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1995 char *buffer;
1996 char *p;
1997 tree t;
1999 /* Substitute [<name>] in input constraint strings. There should be no
2000 named operands in output constraints. */
2001 for (t = inputs; t ; t = TREE_CHAIN (t))
2003 const char *c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2004 if (strchr (c, '[') != NULL)
/* Work on a private copy; build_string makes its own copy again, so
   the scratch BUFFER is freed immediately afterwards. */
2006 p = buffer = xstrdup (c);
2007 while ((p = strchr (p, '[')) != NULL)
2008 p = resolve_operand_name_1 (p, outputs, inputs);
2009 TREE_VALUE (TREE_PURPOSE (t))
2010 = build_string (strlen (buffer), buffer);
2011 free (buffer);
/* Fast path: no named references in the template itself. */
2015 if (strchr (TREE_STRING_POINTER (string), '[') == NULL)
2016 return string;
2018 /* Assume that we will not need extra space to perform the substitution.
2019 This because we get to remove '[' and ']', which means we cannot have
2020 a problem until we have more than 999 operands. */
2022 p = buffer = xstrdup (TREE_STRING_POINTER (string));
2023 while ((p = strchr (p, '%')) != NULL)
/* Only '%[name]' and '%c[name]' (c a single letter modifier) are
   operand references; any other '%' is skipped over. */
2025 if (p[1] == '[')
2026 p += 1;
2027 else if (ISALPHA (p[1]) && p[2] == '[')
2028 p += 2;
2029 else
2031 p += 1;
2032 continue;
2035 p = resolve_operand_name_1 (p, outputs, inputs);
2038 string = build_string (strlen (buffer), buffer);
2039 free (buffer);
2041 return string;
2044 /* A subroutine of resolve_operand_names. P points to the '[' for a
2045 potential named operand of the form [<name>]. In place, replace
2046 the name and brackets with a number. Return a pointer to the
2047 balance of the string after substitution. */
2049 static char *
2050 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
2052 char *q;
2053 int op;
2054 tree t;
2055 size_t len;
2057 /* Collect the operand name. */
2058 q = strchr (p, ']');
2059 if (!q)
2061 error ("missing close brace for named operand");
/* Nothing sensible to substitute; skip to the end of the string. */
2062 return strchr (p, '\0');
2064 len = q - p - 1;
2066 /* Resolve the name to a number. Operands are numbered with outputs
   first, then inputs, so OP keeps counting across both loops. */
2067 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2069 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2070 if (name)
2072 const char *c = TREE_STRING_POINTER (name);
2073 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2074 goto found;
2077 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2079 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2080 if (name)
2082 const char *c = TREE_STRING_POINTER (name);
2083 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2084 goto found;
/* No match: report the name (temporarily NUL-terminated in place)
   and substitute operand 0 so parsing can continue. */
2088 *q = '\0';
2089 error ("undefined named operand '%s'", p + 1);
2090 op = 0;
2091 found:
2093 /* Replace the name with the number. Unfortunately, not all libraries
2094 get the return value of sprintf correct, so search for the end of the
2095 generated string by hand. */
2096 sprintf (p, "%d", op);
2097 p = strchr (p, '\0');
2099 /* Verify the no extra buffer space assumption. */
2100 if (p > q)
2101 abort ();
2103 /* Shift the rest of the buffer down to fill the gap. */
2104 memmove (p, q + 1, strlen (q + 1) + 1);
2106 return p;
2109 /* Generate RTL to evaluate the expression EXP
2110 and remember it in case this is the VALUE in a ({... VALUE; }) constr.
2111 Provided just for backward-compatibility. expand_expr_stmt_value()
2112 should be used for new code. */
2114 void
2115 expand_expr_stmt (tree exp)
/* -1: let expr_stmts_for_value decide whether the value is wanted;
   1: this may be the last statement of a statement-expression. */
2117 expand_expr_stmt_value (exp, -1, 1);
2120 /* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2121 whether to (1) save the value of the expression, (0) discard it or
2122 (-1) use expr_stmts_for_value to tell. The use of -1 is
2123 deprecated, and retained only for backward compatibility.
   MAYBE_LAST is nonzero if EXP may be the final statement of a
   ({...}) construct, in which case no-effect warnings are
   suppressed. */
2125 void
2126 expand_expr_stmt_value (tree exp, int want_value, int maybe_last)
2128 rtx value;
2129 tree type;
2131 if (want_value == -1)
2132 want_value = expr_stmts_for_value != 0;
2134 /* If -Wextra, warn about statements with no side effects,
2135 except for an explicit cast to void (e.g. for assert()), and
2136 except for last statement in ({...}) where they may be useful. */
2137 if (! want_value
2138 && (expr_stmts_for_value == 0 || ! maybe_last)
2139 && exp != error_mark_node)
2141 if (! TREE_SIDE_EFFECTS (exp))
2143 if (warn_unused_value
2144 && !(TREE_CODE (exp) == CONVERT_EXPR
2145 && VOID_TYPE_P (TREE_TYPE (exp))))
2146 warning ("%Hstatement with no effect", &emit_locus);
2148 else if (warn_unused_value)
2149 warn_if_unused_value (exp);
2152 /* If EXP is of function type and we are expanding statements for
2153 value, convert it to pointer-to-function. */
2154 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2155 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2157 /* The call to `expand_expr' could cause last_expr_type and
2158 last_expr_value to get reset. Therefore, we set last_expr_value
2159 and last_expr_type *after* calling expand_expr. */
2160 value = expand_expr (exp, want_value ? NULL_RTX : const0_rtx,
2161 VOIDmode, 0);
2162 type = TREE_TYPE (exp);
2164 /* If all we do is reference a volatile value in memory,
2165 copy it to a register to be sure it is actually touched. */
2166 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2168 if (TYPE_MODE (type) == VOIDmode)
2170 else if (TYPE_MODE (type) != BLKmode)
2171 value = copy_to_reg (value);
2172 else
/* BLKmode volatile reference: we cannot copy it to a register, so
   emit a self-comparison merely to force the memory access. */
2174 rtx lab = gen_label_rtx ();
2176 /* Compare the value with itself to reference it. */
2177 emit_cmp_and_jump_insns (value, value, EQ,
2178 expand_expr (TYPE_SIZE (type),
2179 NULL_RTX, VOIDmode, 0),
2180 BLKmode, 0, lab);
2181 emit_label (lab);
2185 /* If this expression is part of a ({...}) and is in memory, we may have
2186 to preserve temporaries. */
2187 preserve_temp_slots (value);
2189 /* Free any temporaries used to evaluate this expression. Any temporary
2190 used as a result of this expression will already have been preserved
2191 above. */
2192 free_temp_slots ();
2194 if (want_value)
2196 last_expr_value = value;
2197 last_expr_type = type;
2200 emit_queue ();
2203 /* Warn if EXP contains any computations whose results are not used.
2204 Return 1 if a warning is printed; 0 otherwise. */
2207 warn_if_unused_value (tree exp)
2209 if (TREE_USED (exp))
2210 return 0;
2212 /* Don't warn about void constructs. This includes casting to void,
2213 void function calls, and statement expressions with a final cast
2214 to void. */
2215 if (VOID_TYPE_P (TREE_TYPE (exp)))
2216 return 0;
2218 switch (TREE_CODE (exp))
/* These codes either have inherent side effects or are containers
   whose "value" is not a user-level computation; never warn. */
2220 case PREINCREMENT_EXPR:
2221 case POSTINCREMENT_EXPR:
2222 case PREDECREMENT_EXPR:
2223 case POSTDECREMENT_EXPR:
2224 case MODIFY_EXPR:
2225 case INIT_EXPR:
2226 case TARGET_EXPR:
2227 case CALL_EXPR:
2228 case METHOD_CALL_EXPR:
2229 case RTL_EXPR:
2230 case TRY_CATCH_EXPR:
2231 case WITH_CLEANUP_EXPR:
2232 case EXIT_EXPR:
2233 return 0;
2235 case BIND_EXPR:
2236 /* For a binding, warn if no side effect within it. */
2237 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2239 case SAVE_EXPR:
/* NOTE(review): this recurses on operand 1 of the SAVE_EXPR —
   confirm that is the intended operand for this tree layout. */
2240 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2242 case TRUTH_ORIF_EXPR:
2243 case TRUTH_ANDIF_EXPR:
2244 /* In && or ||, warn if 2nd operand has no side effect. */
2245 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2247 case COMPOUND_EXPR:
2248 if (TREE_NO_UNUSED_WARNING (exp))
2249 return 0;
2250 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2251 return 1;
2252 /* Let people do `(foo (), 0)' without a warning. */
2253 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2254 return 0;
2255 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2257 case NOP_EXPR:
2258 case CONVERT_EXPR:
2259 case NON_LVALUE_EXPR:
2260 /* Don't warn about conversions not explicit in the user's program. */
2261 if (TREE_NO_UNUSED_WARNING (exp))
2262 return 0;
2263 /* Assignment to a cast usually results in a cast of a modify.
2264 Don't complain about that. There can be an arbitrary number of
2265 casts before the modify, so we must loop until we find the first
2266 non-cast expression and then test to see if that is a modify. */
2268 tree tem = TREE_OPERAND (exp, 0);
2270 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2271 tem = TREE_OPERAND (tem, 0);
2273 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2274 || TREE_CODE (tem) == CALL_EXPR)
2275 return 0;
2277 goto maybe_warn;
2279 case INDIRECT_REF:
2280 /* Don't warn about automatic dereferencing of references, since
2281 the user cannot control it. */
2282 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2283 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2284 /* Fall through. */
2286 default:
2287 /* Referencing a volatile value is a side effect, so don't warn. */
2288 if ((DECL_P (exp)
2289 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2290 && TREE_THIS_VOLATILE (exp))
2291 return 0;
2293 /* If this is an expression which has no operands, there is no value
2294 to be unused. There are no such language-independent codes,
2295 but front ends may define such. */
2296 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2297 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2298 return 0;
2300 maybe_warn:
2301 /* If this is an expression with side effects, don't warn. */
2302 if (TREE_SIDE_EFFECTS (exp))
2303 return 0;
2305 warning ("%Hvalue computed is not used", &emit_locus);
2306 return 1;
2310 /* Clear out the memory of the last expression evaluated, i.e. the
   last_expr_type/last_expr_value pair used by statement-expressions. */
2312 void
2313 clear_last_expr (void)
2315 last_expr_type = NULL_TREE;
2316 last_expr_value = NULL_RTX;
2319 /* Begin a statement-expression, i.e., a series of statements which
2320 may return a value. Return the RTL_EXPR for this statement expr.
2321 The caller must save that value and pass it to
2322 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2323 in the statement-expression are deallocated at the end of the
2324 expression. */
2326 tree
2327 expand_start_stmt_expr (int has_scope)
2329 tree t;
2331 /* Make the RTL_EXPR node temporary, not momentary,
2332 so that rtl_expr_chain doesn't become garbage. */
2333 t = make_node (RTL_EXPR);
2334 do_pending_stack_adjust ();
/* Collect the statement-expression's insns into a separate sequence;
   the scoped variant ties the sequence to T for temporary cleanup. */
2335 if (has_scope)
2336 start_sequence_for_rtl_expr (t);
2337 else
2338 start_sequence ();
2339 NO_DEFER_POP;
2340 expr_stmts_for_value++;
2341 return t;
2344 /* Restore the previous state at the end of a statement that returns a value.
2345 Returns a tree node representing the statement's value and the
2346 insns to compute the value.
2348 The nodes of that expression have been freed by now, so we cannot use them.
2349 But we don't want to do that anyway; the expression has already been
2350 evaluated and now we just want to use the value. So generate a RTL_EXPR
2351 with the proper type and RTL value.
2353 If the last substatement was not an expression,
2354 return something with type `void'. */
2356 tree
2357 expand_end_stmt_expr (tree t)
2359 OK_DEFER_POP;
/* No value recorded: the statement-expression yields void. */
2361 if (! last_expr_value || ! last_expr_type)
2363 last_expr_value = const0_rtx;
2364 last_expr_type = void_type_node;
2366 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2367 /* Remove any possible QUEUED. */
2368 last_expr_value = protect_from_queue (last_expr_value, 0);
2370 emit_queue ();
2372 TREE_TYPE (t) = last_expr_type;
2373 RTL_EXPR_RTL (t) = last_expr_value;
2374 RTL_EXPR_SEQUENCE (t) = get_insns ();
/* Keep T live on rtl_expr_chain so its sequence is not recycled. */
2376 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2378 end_sequence ();
2380 /* Don't consider deleting this expr or containing exprs at tree level. */
2381 TREE_SIDE_EFFECTS (t) = 1;
2382 /* Propagate volatility of the actual RTL expr. */
2383 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2385 clear_last_expr ();
2386 expr_stmts_for_value--;
2388 return t;
2391 /* Generate RTL for the start of an if-then. COND is the expression
2392 whose truth should be tested.
2394 If EXITFLAG is nonzero, this conditional is visible to
2395 `exit_something'. */
2397 void
2398 expand_start_cond (tree cond, int exitflag)
2400 struct nesting *thiscond = ALLOC_NESTING ();
2402 /* Make an entry on cond_stack for the cond we are entering. */
2404 thiscond->desc = COND_NESTING;
2405 thiscond->next = cond_stack;
2406 thiscond->all = nesting_stack;
2407 thiscond->depth = ++nesting_depth;
/* next_label is the jump target when COND is false (the else/elseif
   point, or the end of the statement when there is no else). */
2408 thiscond->data.cond.next_label = gen_label_rtx ();
2409 /* Before we encounter an `else', we don't need a separate exit label
2410 unless there are supposed to be exit statements
2411 to exit this conditional. */
2412 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2413 thiscond->data.cond.endif_label = thiscond->exit_label;
2414 cond_stack = thiscond;
2415 nesting_stack = thiscond;
2417 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2420 /* Generate RTL between then-clause and the elseif-clause
2421 of an if-then-elseif-.... */
2423 void
2424 expand_start_elseif (tree cond)
/* Lazily create the endif label now that a second branch exists. */
2426 if (cond_stack->data.cond.endif_label == 0)
2427 cond_stack->data.cond.endif_label = gen_label_rtx ();
/* Finish the then-clause with a jump past the remaining branches,
   land the false edge here, and start testing the next condition. */
2428 emit_jump (cond_stack->data.cond.endif_label);
2429 emit_label (cond_stack->data.cond.next_label);
2430 cond_stack->data.cond.next_label = gen_label_rtx ();
2431 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2434 /* Generate RTL between the then-clause and the else-clause
2435 of an if-then-else. */
2437 void
2438 expand_start_else (void)
2440 if (cond_stack->data.cond.endif_label == 0)
2441 cond_stack->data.cond.endif_label = gen_label_rtx ();
2443 emit_jump (cond_stack->data.cond.endif_label);
2444 emit_label (cond_stack->data.cond.next_label);
2445 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2448 /* After calling expand_start_else, turn this "else" into an "else if"
2449 by providing another condition. */
2451 void
2452 expand_elseif (tree cond)
/* Re-arm next_label (cleared by expand_start_else) and test COND. */
2454 cond_stack->data.cond.next_label = gen_label_rtx ();
2455 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2458 /* Generate RTL for the end of an if-then.
2459 Pop the record for it off of cond_stack. */
2461 void
2462 expand_end_cond (void)
2464 struct nesting *thiscond = cond_stack;
2466 do_pending_stack_adjust ();
/* Either label may be unused (no else, or a single-branch if). */
2467 if (thiscond->data.cond.next_label)
2468 emit_label (thiscond->data.cond.next_label);
2469 if (thiscond->data.cond.endif_label)
2470 emit_label (thiscond->data.cond.endif_label);
2472 POPSTACK (cond_stack);
2473 clear_last_expr ();
2476 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2477 loop should be exited by `exit_something'. This is a loop for which
2478 `expand_continue' will jump to the top of the loop.
2480 Make an entry on loop_stack to record the labels associated with
2481 this loop. */
2483 struct nesting *
2484 expand_start_loop (int exit_flag)
2486 struct nesting *thisloop = ALLOC_NESTING ();
2488 /* Make an entry on loop_stack for the loop we are entering. */
2490 thisloop->desc = LOOP_NESTING;
2491 thisloop->next = loop_stack;
2492 thisloop->all = nesting_stack;
2493 thisloop->depth = ++nesting_depth;
2494 thisloop->data.loop.start_label = gen_label_rtx ();
2495 thisloop->data.loop.end_label = gen_label_rtx ();
/* By default `continue' jumps back to the top; see
   expand_start_loop_continue_elsewhere for the other case. */
2496 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2497 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2498 loop_stack = thisloop;
2499 nesting_stack = thisloop;
2501 do_pending_stack_adjust ();
2502 emit_queue ();
2503 emit_note (NOTE_INSN_LOOP_BEG);
2504 emit_label (thisloop->data.loop.start_label);
2506 return thisloop;
2509 /* Like expand_start_loop but for a loop where the continuation point
2510 (for expand_continue_loop) will be specified explicitly, via a later
   call to expand_loop_continue_here. */
2512 struct nesting *
2513 expand_start_loop_continue_elsewhere (int exit_flag)
2515 struct nesting *thisloop = expand_start_loop (exit_flag);
2516 loop_stack->data.loop.continue_label = gen_label_rtx ();
2517 return thisloop;
2520 /* Begin a null, aka do { } while (0) "loop". But since the contents
2521 of said loop can still contain a break, we must frob the loop nest. */
2523 struct nesting *
2524 expand_start_null_loop (void)
2526 struct nesting *thisloop = ALLOC_NESTING ();
2528 /* Make an entry on loop_stack for the loop we are entering. */
2530 thisloop->desc = LOOP_NESTING;
2531 thisloop->next = loop_stack;
2532 thisloop->all = nesting_stack;
2533 thisloop->depth = ++nesting_depth;
/* No real loop back-edge exists, so the "start label" is just a
   deleted-note placeholder; break/continue both go to end_label. */
2534 thisloop->data.loop.start_label = emit_note (NOTE_INSN_DELETED);
2535 thisloop->data.loop.end_label = gen_label_rtx ();
2536 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2537 thisloop->exit_label = thisloop->data.loop.end_label;
2538 loop_stack = thisloop;
2539 nesting_stack = thisloop;
2541 return thisloop;
2544 /* Specify the continuation point for a loop started with
2545 expand_start_loop_continue_elsewhere.
2546 Use this at the point in the code to which a continue statement
2547 should jump. */
2549 void
2550 expand_loop_continue_here (void)
2552 do_pending_stack_adjust ();
2553 emit_note (NOTE_INSN_LOOP_CONT);
2554 emit_label (loop_stack->data.loop.continue_label)
2557 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2558 Pop the block off of loop_stack. */
2560 void
2561 expand_end_loop (void)
2563 rtx start_label = loop_stack->data.loop.start_label;
2564 rtx etc_note;
2565 int eh_regions, debug_blocks;
2566 bool empty_test;
2568 /* Mark the continue-point at the top of the loop if none elsewhere. */
2569 if (start_label == loop_stack->data.loop.continue_label)
2570 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2572 do_pending_stack_adjust ();
2574 /* If the loop starts with a loop exit, roll that to the end where
2575 it will optimize together with the jump back.
2577 If the loop presently looks like this (in pseudo-C):
2579 LOOP_BEG
2580 start_label:
2581 if (test) goto end_label;
2582 LOOP_END_TOP_COND
2583 body;
2584 goto start_label;
2585 end_label:
2587 transform it to look like:
2589 LOOP_BEG
2590 goto start_label;
2591 top_label:
2592 body;
2593 start_label:
2594 if (test) goto end_label;
2595 goto top_label;
2596 end_label:
2598 We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
2599 the end of the entry conditional. Without this, our lexical scan
2600 can't tell the difference between an entry conditional and a
2601 body conditional that exits the loop. Mistaking the two means
2602 that we can misplace the NOTE_INSN_LOOP_CONT note, which can
2603 screw up loop unrolling.
2605 Things will be oh so much better when loop optimization is done
2606 off of a proper control flow graph... */
2608 /* Scan insns from the top of the loop looking for the END_TOP_COND note. */
2610 empty_test = true;
2611 eh_regions = debug_blocks = 0;
2612 for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2613 if (GET_CODE (etc_note) == NOTE)
2615 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2616 break;
2618 /* We must not walk into a nested loop. */
2619 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2621 etc_note = NULL_RTX;
2622 break;
2625 /* At the same time, scan for EH region notes, as we don't want
2626 to scrog region nesting. This shouldn't happen, but... */
2627 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2628 eh_regions++;
2629 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2631 if (--eh_regions < 0)
2632 /* We've come to the end of an EH region, but never saw the
2633 beginning of that region. That means that an EH region
2634 begins before the top of the loop, and ends in the middle
2635 of it. The existence of such a situation violates a basic
2636 assumption in this code, since that would imply that even
2637 when EH_REGIONS is zero, we might move code out of an
2638 exception region. */
2639 abort ();
2642 /* Likewise for debug scopes. In this case we'll either (1) move
2643 all of the notes if they are properly nested or (2) leave the
2644 notes alone and only rotate the loop at high optimization
2645 levels when we expect to scrog debug info. */
2646 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2647 debug_blocks++;
2648 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2649 debug_blocks--;
2651 else if (INSN_P (etc_note))
2652 empty_test = false;
/* Only rotate when we found the note, the entry test is non-empty,
   nothing would be moved across an EH boundary, and the last insn
   is not already a conditional jump. */
2654 if (etc_note
2655 && optimize
2656 && ! empty_test
2657 && eh_regions == 0
2658 && (debug_blocks == 0 || optimize >= 2)
2659 && NEXT_INSN (etc_note) != NULL_RTX
2660 && ! any_condjump_p (get_last_insn ()))
2662 /* We found one. Move everything from START to ETC to the end
2663 of the loop, and add a jump from the top of the loop. */
2664 rtx top_label = gen_label_rtx ();
2665 rtx start_move = start_label;
2667 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2668 then we want to move this note also. */
2669 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2670 && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
2671 start_move = PREV_INSN (start_move);
2673 emit_label_before (top_label, start_move);
2675 /* Actually move the insns. If the debug scopes are nested, we
2676 can move everything at once. Otherwise we have to move them
2677 one by one and squeeze out the block notes. */
2678 if (debug_blocks == 0)
2679 reorder_insns (start_move, etc_note, get_last_insn ());
2680 else
2682 rtx insn, next_insn;
2683 for (insn = start_move; insn; insn = next_insn)
2685 /* Figure out which insn comes after this one. We have
2686 to do this before we move INSN. */
2687 next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
/* Drop BLOCK_BEG/BLOCK_END notes rather than move them, since
   they are no longer properly nested after the rotation. */
2689 if (GET_CODE (insn) == NOTE
2690 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2691 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2692 continue;
2694 reorder_insns (insn, insn, get_last_insn ());
2698 /* Add the jump from the top of the loop. */
2699 emit_jump_insn_before (gen_jump (start_label), top_label);
2700 emit_barrier_before (top_label);
2701 start_label = top_label;
2704 emit_jump (start_label);
2705 emit_note (NOTE_INSN_LOOP_END);
2706 emit_label (loop_stack->data.loop.end_label);
2708 POPSTACK (loop_stack);
2710 clear_last_expr ();
2713 /* Finish a null loop, aka do { } while (0). Emits only the exit
   label that any contained `break' jumps to. */
2715 void
2716 expand_end_null_loop (void)
2718 do_pending_stack_adjust ();
2719 emit_label (loop_stack->data.loop.end_label);
2721 POPSTACK (loop_stack);
2723 clear_last_expr ();
2726 /* Generate a jump to the current loop's continue-point.
2727 This is usually the top of the loop, but may be specified
2728 explicitly elsewhere. If not currently inside a loop,
2729 return 0 and do nothing; caller will print an error message. */
2732 expand_continue_loop (struct nesting *whichloop)
2734 /* Emit information for branch prediction. */
2735 rtx note;
2737 if (flag_guess_branch_prob)
/* `continue' edges are predicted taken. */
2739 note = emit_note (NOTE_INSN_PREDICTION);
2740 NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
2742 clear_last_expr ();
2743 if (whichloop == 0)
2744 whichloop = loop_stack;
2745 if (whichloop == 0)
2746 return 0;
2747 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2748 NULL_RTX);
2749 return 1;
2752 /* Generate a jump to exit the current loop. If not currently inside a loop,
2753 return 0 and do nothing; caller will print an error message. */
2756 expand_exit_loop (struct nesting *whichloop)
2758 clear_last_expr ();
/* A null WHICHLOOP means the innermost loop, if any. */
2759 if (whichloop == 0)
2760 whichloop = loop_stack;
2761 if (whichloop == 0)
2762 return 0;
2763 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2764 return 1;
2767 /* Generate a conditional jump to exit the current loop if COND
2768 evaluates to zero. If not currently inside a loop,
2769 return 0 and do nothing; caller will print an error message. */
2772 expand_exit_loop_if_false (struct nesting *whichloop, tree cond)
2774 rtx label;
2775 clear_last_expr ();
2777 if (whichloop == 0)
2778 whichloop = loop_stack;
2779 if (whichloop == 0)
2780 return 0;
/* Fold the trivial cases: a true condition never exits, a false
   condition exits unconditionally. */
2782 if (integer_nonzerop (cond))
2783 return 1;
2784 if (integer_zerop (cond))
2785 return expand_exit_loop (whichloop);
2787 /* Check if we definitely won't need a fixup. */
2788 if (whichloop == nesting_stack)
2790 jumpifnot (cond, whichloop->data.loop.end_label);
2791 return 1;
2794 /* In order to handle fixups, we actually create a conditional jump
2795 around an unconditional branch to exit the loop. If fixups are
2796 necessary, they go before the unconditional branch. */
2798 label = gen_label_rtx ();
2799 jumpif (cond, label);
2800 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2801 NULL_RTX);
2802 emit_label (label);
2804 return 1;
2807 /* Like expand_exit_loop_if_false except also emit a note marking
2808 the end of the conditional. Should only be used immediately
2809 after expand_loop_start. The note lets expand_end_loop recognize
   the entry test when it considers rotating the loop. */
2812 expand_exit_loop_top_cond (struct nesting *whichloop, tree cond)
2814 if (! expand_exit_loop_if_false (whichloop, cond))
2815 return 0;
2817 emit_note (NOTE_INSN_LOOP_END_TOP_COND);
2818 return 1;
2821 /* Return nonzero if we should preserve sub-expressions as separate
2822 pseudos. We never do so if we aren't optimizing. We always do so
2823 if -fexpensive-optimizations.
2825 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2826 the loop may still be a small one. */
2829 preserve_subexpressions_p (void)
2831 rtx insn;
2833 if (flag_expensive_optimizations)
2834 return 1;
2836 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2837 return 0;
2839 insn = get_last_insn_anywhere ();
/* Heuristic: "early" means within ~3 insns per non-fixed hard
   register since the innermost loop's start label. */
2841 return (insn
2842 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2843 < n_non_fixed_regs * 3));
2847 /* Generate a jump to exit the current loop, conditional, binding contour
2848 or case statement. Not all such constructs are visible to this function,
2849 only those started with EXIT_FLAG nonzero. Individual languages use
2850 the EXIT_FLAG parameter to control which kinds of constructs you can
2851 exit this way.
2853 If not currently inside anything that can be exited,
2854 return 0 and do nothing; caller will print an error message. */
2857 expand_exit_something (void)
2859 struct nesting *n;
2860 clear_last_expr ();
/* Walk outward through every kind of nesting, taking the first
   construct that was created with an exit label. */
2861 for (n = nesting_stack; n; n = n->all)
2862 if (n->exit_label != 0)
2864 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2865 return 1;
2868 return 0;
2871 /* Generate RTL to return from the current function, with no value.
2872 (That is, we do not do anything about returning any value.) */
2874 void
2875 expand_null_return (void)
2877 rtx last_insn;
2879 last_insn = get_last_insn ();
2881 /* If this function was declared to return a value, but we
2882 didn't, clobber the return registers so that they are not
2883 propagated live to the rest of the function. */
2884 clobber_return_register ();
2886 expand_null_return_1 (last_insn);
2889 /* Try to guess whether the value of return means error code.
   Returns the applicable branch predictor, or PRED_NO_PREDICTION. */
2890 static enum br_predictor
2891 return_prediction (rtx val)
2893 /* Different heuristics for pointers and scalars. */
2894 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2896 /* NULL is usually not returned. */
2897 if (val == const0_rtx)
2898 return PRED_NULL_RETURN;
2900 else
2902 /* Negative return values are often used to indicate
2903 errors. */
2904 if (GET_CODE (val) == CONST_INT
2905 && INTVAL (val) < 0)
2906 return PRED_NEGATIVE_RETURN;
2907 /* Constant return values are also usually errors;
2908 zero/one often mean booleans so exclude them from the
2909 heuristics. */
2910 if (CONSTANT_P (val)
2911 && (val != const0_rtx && val != const1_rtx))
2912 return PRED_CONST_RETURN;
2914 return PRED_NO_PREDICTION;
2917 /* Generate RTL to return from the current function, with value VAL. */
2919 static void
2920 expand_value_return (rtx val)
2922 rtx last_insn;
2923 rtx return_reg;
2924 enum br_predictor pred;
2926 if (flag_guess_branch_prob
2927 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
2929 /* Emit information for branch prediction. */
2930 rtx note;
2932 note = emit_note (NOTE_INSN_PREDICTION);
2934 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2938 last_insn = get_last_insn ();
2939 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2941 /* Copy the value to the return location
2942 unless it's already there. */
2944 if (return_reg != val)
2946 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2947 #ifdef PROMOTE_FUNCTION_RETURN
/* Targets that promote return values may use a wider mode for the
   return register than the declared result; convert VAL to match. */
2948 int unsignedp = TREE_UNSIGNED (type);
2949 enum machine_mode old_mode
2950 = DECL_MODE (DECL_RESULT (current_function_decl));
2951 enum machine_mode mode
2952 = promote_mode (type, old_mode, &unsignedp, 1);
2954 if (mode != old_mode)
2955 val = convert_modes (mode, old_mode, val, unsignedp);
2956 #endif
/* A PARALLEL return location means the value is split across
   several registers; load them as a group. */
2957 if (GET_CODE (return_reg) == PARALLEL)
2958 emit_group_load (return_reg, val, int_size_in_bytes (type));
2959 else
2960 emit_move_insn (return_reg, val);
2963 expand_null_return_1 (last_insn);
2966 /* Output a return with no value. If LAST_INSN is nonzero,
2967 pretend that the return takes place after LAST_INSN. */
2969 static void
2970 expand_null_return_1 (rtx last_insn)
/* Jump to the cleanup code if there is any, else straight to the
   function's return label, creating the latter on first use. */
2972 rtx end_label = cleanup_label ? cleanup_label : return_label;
2974 clear_pending_stack_adjust ();
2975 do_pending_stack_adjust ();
2976 clear_last_expr ();
2978 if (end_label == 0)
2979 end_label = return_label = gen_label_rtx ();
2980 expand_goto_internal (NULL_TREE, end_label, last_insn);
2983 /* Generate RTL to evaluate the expression RETVAL and return it
2984 from the current function. */
2986 void
2987 expand_return (tree retval)
2989 /* If there are any cleanups to be performed, then they will
2990 be inserted following LAST_INSN. It is desirable
2991 that the last_insn, for such purposes, should be the
2992 last insn before computing the return value. Otherwise, cleanups
2993 which call functions can clobber the return value. */
2994 /* ??? rms: I think that is erroneous, because in C++ it would
2995 run destructors on variables that might be used in the subsequent
2996 computation of the return value. */
2997 rtx last_insn = 0;
2998 rtx result_rtl;
2999 rtx val = 0;
3000 tree retval_rhs;
3002 /* If function wants no value, give it none. */
3003 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3005 expand_expr (retval, NULL_RTX, VOIDmode, 0);
3006 emit_queue ();
3007 expand_null_return ();
3008 return;
3011 if (retval == error_mark_node)
3013 /* Treat this like a return of no value from a function that
3014 returns a value. */
3015 expand_null_return ();
3016 return;
3018 else if (TREE_CODE (retval) == RESULT_DECL)
3019 retval_rhs = retval;
3020 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3021 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3022 retval_rhs = TREE_OPERAND (retval, 1);
3023 else if (VOID_TYPE_P (TREE_TYPE (retval)))
3024 /* Recognize tail-recursive call to void function. */
3025 retval_rhs = retval;
3026 else
3027 retval_rhs = NULL_TREE;
3029 last_insn = get_last_insn ();
3031 /* Distribute return down conditional expr if either of the sides
3032 may involve tail recursion (see test below). This enhances the number
3033 of tail recursions we see. Don't do this always since it can produce
3034 sub-optimal code in some cases and we distribute assignments into
3035 conditional expressions when it would help. */
3037 if (optimize && retval_rhs != 0
3038 && frame_offset == 0
3039 && TREE_CODE (retval_rhs) == COND_EXPR
3040 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3041 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3043 rtx label = gen_label_rtx ();
3044 tree expr;
3046 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3047 start_cleanup_deferral ();
3048 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3049 DECL_RESULT (current_function_decl),
3050 TREE_OPERAND (retval_rhs, 1));
3051 TREE_SIDE_EFFECTS (expr) = 1;
3052 expand_return (expr);
3053 emit_label (label);
3055 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3056 DECL_RESULT (current_function_decl),
3057 TREE_OPERAND (retval_rhs, 2));
3058 TREE_SIDE_EFFECTS (expr) = 1;
3059 expand_return (expr);
3060 end_cleanup_deferral ();
3061 return;
3064 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3066 /* If the result is an aggregate that is being returned in one (or more)
3067 registers, load the registers here. The compiler currently can't handle
3068 copying a BLKmode value into registers. We could put this code in a
3069 more general area (for use by everyone instead of just function
3070 call/return), but until this feature is generally usable it is kept here
3071 (and in expand_call). The value must go into a pseudo in case there
3072 are cleanups that will clobber the real return register. */
3074 if (retval_rhs != 0
3075 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3076 && GET_CODE (result_rtl) == REG)
3078 int i;
3079 unsigned HOST_WIDE_INT bitpos, xbitpos;
3080 unsigned HOST_WIDE_INT big_endian_correction = 0;
3081 unsigned HOST_WIDE_INT bytes
3082 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3083 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3084 unsigned int bitsize
3085 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3086 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3087 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3088 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3089 enum machine_mode tmpmode, result_reg_mode;
3091 if (bytes == 0)
3093 expand_null_return ();
3094 return;
3097 /* Structures whose size is not a multiple of a word are aligned
3098 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3099 machine, this means we must skip the empty high order bytes when
3100 calculating the bit offset. */
3101 if (BYTES_BIG_ENDIAN
3102 && bytes % UNITS_PER_WORD)
3103 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3104 * BITS_PER_UNIT));
3106 /* Copy the structure BITSIZE bits at a time. */
3107 for (bitpos = 0, xbitpos = big_endian_correction;
3108 bitpos < bytes * BITS_PER_UNIT;
3109 bitpos += bitsize, xbitpos += bitsize)
3111 /* We need a new destination pseudo each time xbitpos is
3112 on a word boundary and when xbitpos == big_endian_correction
3113 (the first time through). */
3114 if (xbitpos % BITS_PER_WORD == 0
3115 || xbitpos == big_endian_correction)
3117 /* Generate an appropriate register. */
3118 dst = gen_reg_rtx (word_mode);
3119 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3121 /* Clear the destination before we move anything into it. */
3122 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
3125 /* We need a new source operand each time bitpos is on a word
3126 boundary. */
3127 if (bitpos % BITS_PER_WORD == 0)
3128 src = operand_subword_force (result_val,
3129 bitpos / BITS_PER_WORD,
3130 BLKmode);
3132 /* Use bitpos for the source extraction (left justified) and
3133 xbitpos for the destination store (right justified). */
3134 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3135 extract_bit_field (src, bitsize,
3136 bitpos % BITS_PER_WORD, 1,
3137 NULL_RTX, word_mode, word_mode,
3138 BITS_PER_WORD),
3139 BITS_PER_WORD);
3142 /* Find the smallest integer mode large enough to hold the
3143 entire structure and use that mode instead of BLKmode
3144 on the USE insn for the return register. */
3145 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3146 tmpmode != VOIDmode;
3147 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3148 /* Have we found a large enough mode? */
3149 if (GET_MODE_SIZE (tmpmode) >= bytes)
3150 break;
3152 /* No suitable mode found. */
3153 if (tmpmode == VOIDmode)
3154 abort ();
3156 PUT_MODE (result_rtl, tmpmode);
3158 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3159 result_reg_mode = word_mode;
3160 else
3161 result_reg_mode = tmpmode;
3162 result_reg = gen_reg_rtx (result_reg_mode);
3164 emit_queue ();
3165 for (i = 0; i < n_regs; i++)
3166 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3167 result_pseudos[i]);
3169 if (tmpmode != result_reg_mode)
3170 result_reg = gen_lowpart (tmpmode, result_reg);
3172 expand_value_return (result_reg);
3174 else if (retval_rhs != 0
3175 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3176 && (GET_CODE (result_rtl) == REG
3177 || (GET_CODE (result_rtl) == PARALLEL)))
3179 /* Calculate the return value into a temporary (usually a pseudo
3180 reg). */
3181 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3182 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3184 val = assign_temp (nt, 0, 0, 1);
3185 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3186 val = force_not_mem (val);
3187 emit_queue ();
3188 /* Return the calculated value, doing cleanups first. */
3189 expand_value_return (val);
3191 else
3193 /* No cleanups or no hard reg used;
3194 calculate value into hard return reg. */
3195 expand_expr (retval, const0_rtx, VOIDmode, 0);
3196 emit_queue ();
3197 expand_value_return (result_rtl);
3201 /* Attempt to optimize a potential tail recursion call into a goto.
3202 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3203 where to place the jump to the tail recursion label.
3205 Return TRUE if the call was optimized into a goto. */
3208 optimize_tail_recursion (tree arguments, rtx last_insn)
3210 /* Finish checking validity, and if valid emit code to set the
3211 argument variables for the new call. */
3212 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3214 if (tail_recursion_label == 0)
3216 tail_recursion_label = gen_label_rtx ();
3217 emit_label_after (tail_recursion_label,
3218 tail_recursion_reentry);
3220 emit_queue ();
3221 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3222 emit_barrier ();
3223 return 1;
3225 return 0;
3228 /* Emit code to alter this function's formal parms for a tail-recursive call.
3229 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3230 FORMALS is the chain of decls of formals.
3231 Return 1 if this can be done;
3232 otherwise return 0 and do not emit any code. */
3234 static int
3235 tail_recursion_args (tree actuals, tree formals)
3237 tree a = actuals, f = formals;
3238 int i;
3239 rtx *argvec;
3241 /* Check that number and types of actuals are compatible
3242 with the formals. This is not always true in valid C code.
3243 Also check that no formal needs to be addressable
3244 and that all formals are scalars.  */
3246 /* Also count the args.  */
3248 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3250 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3251 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3252 return 0;
/* A formal that does not live in a register, or has aggregate
   (BLKmode) mode, cannot be reassigned this way.  */
3253 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3254 return 0;
/* If either chain has leftovers, the argument counts differ.  */
3256 if (a != 0 || f != 0)
3257 return 0;
3259 /* Compute all the actuals.  */
3261 argvec = (rtx *) alloca (i * sizeof (rtx));
3263 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3264 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3266 /* Find which actual values refer to current values of previous formals.
3267 Copy each of them now, before any formal is changed.  */
3269 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3271 int copy = 0;
3272 int j;
3273 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3274 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3276 copy = 1;
3277 break;
3279 if (copy)
3280 argvec[i] = copy_to_reg (argvec[i]);
3283 /* Store the values of the actuals into the formals.  */
3285 for (f = formals, a = actuals, i = 0; f;
3286 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3288 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3289 emit_move_insn (DECL_RTL (f), argvec[i]);
3290 else
/* Modes differ: recompute the signedness the formal was promoted
   with (promote_mode updates UNSIGNEDP in place) so the conversion
   below matches the original promotion.  */
3292 rtx tmp = argvec[i];
3293 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a)));
3294 promote_mode(TREE_TYPE (TREE_VALUE (a)), GET_MODE (tmp),
3295 &unsignedp, 0);
3296 if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
/* Convert in two steps through the formal's declared mode.  */
3298 tmp = gen_reg_rtx (DECL_MODE (f));
3299 convert_move (tmp, argvec[i], unsignedp);
3301 convert_move (DECL_RTL (f), tmp, unsignedp);
3305 free_temp_slots ();
3306 return 1;
3309 /* Generate the RTL code for entering a binding contour.
3310 The variables are declared one by one, by calls to `expand_decl'.
3312 FLAGS is a bitwise or of the following flags:
3314 1 - Nonzero if this construct should be visible to
3315 `exit_something'.
3317 2 - Nonzero if this contour does not require a
3318 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3319 language-independent code should set this flag because they
3320 will not create corresponding BLOCK nodes. (There should be
3321 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3322 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3323 when expand_end_bindings is called.
3325 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3326 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3327 note. */
3329 void
3330 expand_start_bindings_and_block (int flags, tree block)
3332 struct nesting *thisblock = ALLOC_NESTING ();
3333 rtx note;
3334 int exit_flag = ((flags & 1) != 0);
3335 int block_flag = ((flags & 2) == 0);
3337 /* If a BLOCK is supplied, then the caller should be requesting a
3338 NOTE_INSN_BLOCK_BEG note.  */
3339 if (!block_flag && block)
3340 abort ();
3342 /* Create a note to mark the beginning of the block.  */
3343 if (block_flag)
3345 note = emit_note (NOTE_INSN_BLOCK_BEG);
3346 NOTE_BLOCK (note) = block;
3348 else
3349 note = emit_note (NOTE_INSN_DELETED);
3351 /* Make an entry on block_stack for the block we are entering.  */
3353 thisblock->desc = BLOCK_NESTING;
3354 thisblock->next = block_stack;
3355 thisblock->all = nesting_stack;
3356 thisblock->depth = ++nesting_depth;
3357 thisblock->data.block.stack_level = 0;
3358 thisblock->data.block.cleanups = 0;
3359 thisblock->data.block.exception_region = 0;
3360 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3362 thisblock->data.block.conditional_code = 0;
3363 thisblock->data.block.last_unconditional_cleanup = note;
3364 /* When we insert instructions after the last unconditional cleanup,
3365 we don't adjust last_insn. That means that a later add_insn will
3366 clobber the instructions we've just added. The easiest way to
3367 fix this is to just insert another instruction here, so that the
3368 instructions inserted after the last unconditional cleanup are
3369 never the last instruction.  */
3370 emit_note (NOTE_INSN_DELETED);
/* Inherit any cleanups pending in enclosing blocks, so that exiting
   this block through them still runs those cleanups.  */
3372 if (block_stack
3373 && !(block_stack->data.block.cleanups == NULL_TREE
3374 && block_stack->data.block.outer_cleanups == NULL_TREE))
3375 thisblock->data.block.outer_cleanups
3376 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3377 block_stack->data.block.outer_cleanups)
3378 else
3379 thisblock->data.block.outer_cleanups = 0;
3380 thisblock->data.block.label_chain = 0;
3381 thisblock->data.block.innermost_stack_block = stack_block_stack;
3382 thisblock->data.block.first_insn = note;
3383 thisblock->data.block.block_start_count = ++current_block_start_count;
/* An exit label is created only when bit 1 of FLAGS requested
   visibility to `exit_something'.  */
3384 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
/* Push the new entry on both the block stack and the stack of all
   nesting constructs.  */
3385 block_stack = thisblock;
3386 nesting_stack = thisblock;
3388 /* Make a new level for allocating stack slots.  */
3389 push_temp_slots ();
3392 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3393 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3394 expand_expr are made. After we end the region, we know that all
3395 space for all temporaries that were created by TARGET_EXPRs will be
3396 destroyed and their space freed for reuse. */
3398 void
3399 expand_start_target_temps (void)
3401 /* This is so that even if the result is preserved, the space
3402 allocated will be freed, as we know that it is no longer in use. */
3403 push_temp_slots ();
3405 /* Start a new binding layer that will keep track of all cleanup
3406 actions to be performed. */
3407 expand_start_bindings (2);
3409 target_temp_slot_level = temp_slot_level;
3412 void
3413 expand_end_target_temps (void)
3415 expand_end_bindings (NULL_TREE, 0, 0);
3417 /* This is so that even if the result is preserved, the space
3418 allocated will be freed, as we know that it is no longer in use. */
3419 pop_temp_slots ();
3422 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
3423 in question represents the outermost pair of curly braces (i.e. the "body
3424 block") of a function or method.
3426 For any BLOCK node representing a "body block" of a function or method, the
3427 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3428 represents the outermost (function) scope for the function or method (i.e.
3429 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3430 *that* node in turn will point to the relevant FUNCTION_DECL node. */
3433 is_body_block (tree stmt)
3435 if (lang_hooks.no_body_blocks)
3436 return 0;
3438 if (TREE_CODE (stmt) == BLOCK)
3440 tree parent = BLOCK_SUPERCONTEXT (stmt);
3442 if (parent && TREE_CODE (parent) == BLOCK)
3444 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3446 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3447 return 1;
3451 return 0;
3454 /* True if we are currently emitting insns in an area of output code
3455 that is controlled by a conditional expression. This is used by
3456 the cleanup handling code to generate conditional cleanup actions. */
3459 conditional_context (void)
3461 return block_stack && block_stack->data.block.conditional_code;
3464 /* Return an opaque pointer to the current nesting level, so frontend code
3465 can check its own sanity. */
3467 struct nesting *
3468 current_nesting_level (void)
3470 return cfun ? block_stack : 0;
3473 /* Emit a handler label for a nonlocal goto handler.
3474 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3476 static rtx
3477 expand_nl_handler_label (rtx slot, rtx before_insn)
3479 rtx insns;
3480 rtx handler_label = gen_label_rtx ();
3482 /* Don't let cleanup_cfg delete the handler. */
3483 LABEL_PRESERVE_P (handler_label) = 1;
3485 start_sequence ();
3486 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3487 insns = get_insns ();
3488 end_sequence ();
3489 emit_insn_before (insns, before_insn);
3491 emit_label (handler_label);
3493 return handler_label;
3496 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3497 handler. */
3498 static void
3499 expand_nl_goto_receiver (void)
/* When the target provides a nonlocal_goto pattern, that pattern is
   responsible for restoring the frame pointer, so the explicit
   restore below is skipped.  */
3501 #ifdef HAVE_nonlocal_goto
3502 if (! HAVE_nonlocal_goto)
3503 #endif
3504 /* First adjust our frame pointer to its actual value. It was
3505 previously set to the start of the virtual area corresponding to
3506 the stacked variables when we branched here and now needs to be
3507 adjusted to the actual hardware fp value.
3509 Assignments are to virtual registers are converted by
3510 instantiate_virtual_regs into the corresponding assignment
3511 to the underlying register (fp in this case) that makes
3512 the original assignment true.
3513 So the following insn will actually be
3514 decrementing fp by STARTING_FRAME_OFFSET.  */
3515 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3517 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3518 if (fixed_regs[ARG_POINTER_REGNUM])
3520 #ifdef ELIMINABLE_REGS
3521 /* If the argument pointer can be eliminated in favor of the
3522 frame pointer, we don't need to restore it. We assume here
3523 that if such an elimination is present, it can always be used.
3524 This is the case on all known machines; if we don't make this
3525 assumption, we do unnecessary saving on many machines.  */
3526 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3527 size_t i;
3529 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3530 if (elim_regs[i].from == ARG_POINTER_REGNUM
3531 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3532 break;
/* No such elimination found: the arg pointer really must be
   reloaded from its stack save slot.  */
3534 if (i == ARRAY_SIZE (elim_regs))
3535 #endif
3537 /* Now restore our arg pointer from the address at which it
3538 was saved in our stack frame.  */
3539 emit_move_insn (virtual_incoming_args_rtx,
3540 copy_to_reg (get_arg_pointer_save_area (cfun)));
3543 #endif
/* Let the target emit any additional receiver code it needs
   (e.g. reloading other vital registers).  */
3545 #ifdef HAVE_nonlocal_goto_receiver
3546 if (HAVE_nonlocal_goto_receiver)
3547 emit_insn (gen_nonlocal_goto_receiver ());
3548 #endif
3551 /* Make handlers for nonlocal gotos taking place in the function calls in
3552 block THISBLOCK. */
3554 static void
3555 expand_nl_goto_receivers (struct nesting *thisblock)
3557 tree link;
3558 rtx afterward = gen_label_rtx ();
3559 rtx insns, slot;
3560 rtx label_list;
3561 int any_invalid;
3563 /* Record the handler address in the stack slot for that purpose,
3564 during this block, saving and restoring the outer value.  */
3565 if (thisblock->next != 0)
3566 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
/* SAVE_RECEIVER holds the slot's previous contents; it is loaded at
   block entry (sequence inserted before first_insn) and written back
   here at block exit.  */
3568 rtx save_receiver = gen_reg_rtx (Pmode);
3569 emit_move_insn (XEXP (slot, 0), save_receiver);
3571 start_sequence ();
3572 emit_move_insn (save_receiver, XEXP (slot, 0));
3573 insns = get_insns ();
3574 end_sequence ();
3575 emit_insn_before (insns, thisblock->data.block.first_insn);
3578 /* Jump around the handlers; they run only when specially invoked.  */
3579 emit_jump (afterward);
3581 /* Make a separate handler for each label.  */
3582 link = nonlocal_labels;
3583 slot = nonlocal_goto_handler_slots;
3584 label_list = NULL_RTX;
3585 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3586 /* Skip any labels we shouldn't be able to jump to from here,
3587 we generate one special handler for all of them below which just calls
3588 abort.  */
3589 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3591 rtx lab;
3592 lab = expand_nl_handler_label (XEXP (slot, 0),
3593 thisblock->data.block.first_insn);
3594 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3596 expand_nl_goto_receiver ();
3598 /* Jump to the "real" nonlocal label.  */
3599 expand_goto (TREE_VALUE (link));
3602 /* A second pass over all nonlocal labels; this time we handle those
3603 we should not be able to jump to at this point.  */
3604 link = nonlocal_labels;
3605 slot = nonlocal_goto_handler_slots;
3606 any_invalid = 0;
3607 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3608 if (DECL_TOO_LATE (TREE_VALUE (link)))
3610 rtx lab;
3611 lab = expand_nl_handler_label (XEXP (slot, 0),
3612 thisblock->data.block.first_insn);
3613 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3614 any_invalid = 1;
/* One shared handler for all the too-late labels: restore vital
   registers, then trap.  */
3617 if (any_invalid)
3619 expand_nl_goto_receiver ();
3620 expand_builtin_trap ();
/* Publish the handler labels and resume normal code after them.  */
3623 nonlocal_goto_handler_labels = label_list;
3624 emit_label (afterward);
3627 /* Warn about any unused VARS (which may contain nodes other than
3628 VAR_DECLs, but such nodes are ignored). The nodes are connected
3629 via the TREE_CHAIN field. */
3631 void
3632 warn_about_unused_variables (tree vars)
3634 tree decl;
3636 if (warn_unused_variable)
3637 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3638 if (TREE_CODE (decl) == VAR_DECL
3639 && ! TREE_USED (decl)
3640 && ! DECL_IN_SYSTEM_HEADER (decl)
3641 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3642 warning_with_decl (decl, "unused variable `%s'");
3645 /* Generate RTL code to terminate a binding contour.
3647 VARS is the chain of VAR_DECL nodes for the variables bound in this
3648 contour. There may actually be other nodes in this chain, but any
3649 nodes other than VAR_DECLS are ignored.
3651 MARK_ENDS is nonzero if we should put a note at the beginning
3652 and end of this binding contour.
3654 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
3655 zero if we can jump into this contour only if it does not have a saved
3656 stack level, and negative if we are not to check for invalid use of
3657 labels (because the front end does that). */
3659 void
3660 expand_end_bindings (tree vars, int mark_ends, int dont_jump_in)
3662 struct nesting *thisblock = block_stack;
3664 /* If any of the variables in this scope were not used, warn the
3665 user.  */
3666 warn_about_unused_variables (vars);
/* Land the `exit_something' target, if this contour requested one.  */
3668 if (thisblock->exit_label)
3670 do_pending_stack_adjust ();
3671 emit_label (thisblock->exit_label);
3674 /* If necessary, make handlers for nonlocal gotos taking
3675 place in the function calls in this block.  */
3676 if (function_call_count != 0 && nonlocal_labels
3677 /* Make handler for outermost block
3678 if there were any nonlocal gotos to this function.  */
3679 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3680 /* Make handler for inner block if it has something
3681 special to do when you jump out of it.  */
3682 : (thisblock->data.block.cleanups != 0
3683 || thisblock->data.block.stack_level != 0)))
3684 expand_nl_goto_receivers (thisblock);
3686 /* Don't allow jumping into a block that has a stack level.
3687 Cleanups are allowed, though.  */
3688 if (dont_jump_in > 0
3689 || (dont_jump_in == 0 && thisblock->data.block.stack_level != 0))
3691 struct label_chain *chain;
3693 /* Any labels in this block are no longer valid to go to.
3694 Mark them to cause an error message.  */
3695 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3697 DECL_TOO_LATE (chain->label) = 1;
3698 /* If any goto without a fixup came to this label,
3699 that must be an error, because gotos without fixups
3700 come from outside all saved stack-levels.  */
3701 if (TREE_ADDRESSABLE (chain->label))
3702 error_with_decl (chain->label,
3703 "label `%s' used before containing binding contour");
3707 /* Restore stack level in effect before the block
3708 (only if variable-size objects allocated).  */
3709 /* Perform any cleanups associated with the block.  */
3711 if (thisblock->data.block.stack_level != 0
3712 || thisblock->data.block.cleanups != 0)
3714 int reachable;
3715 rtx insn;
3717 /* Don't let cleanups affect ({...}) constructs.  */
3718 int old_expr_stmts_for_value = expr_stmts_for_value;
3719 rtx old_last_expr_value = last_expr_value;
3720 tree old_last_expr_type = last_expr_type;
3721 expr_stmts_for_value = 0;
3723 /* Only clean up here if this point can actually be reached.  */
/* A BARRIER just before this point means control cannot fall
   through into the cleanups.  */
3724 insn = get_last_insn ();
3725 if (GET_CODE (insn) == NOTE)
3726 insn = prev_nonnote_insn (insn);
3727 reachable = (! insn || GET_CODE (insn) != BARRIER);
3729 /* Do the cleanups.  */
3730 expand_cleanups (thisblock->data.block.cleanups, 0, reachable);
3731 if (reachable)
3732 do_pending_stack_adjust ();
3734 expr_stmts_for_value = old_expr_stmts_for_value;
3735 last_expr_value = old_last_expr_value;
3736 last_expr_type = old_last_expr_type;
3738 /* Restore the stack level.  */
3740 if (reachable && thisblock->data.block.stack_level != 0)
3742 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3743 thisblock->data.block.stack_level, NULL_RTX);
3744 if (nonlocal_goto_handler_slots != 0)
3745 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3746 NULL_RTX);
3749 /* Any gotos out of this block must also do these things.
3750 Also report any gotos with fixups that came to labels in this
3751 level.  */
3752 fixup_gotos (thisblock,
3753 thisblock->data.block.stack_level,
3754 thisblock->data.block.cleanups,
3755 thisblock->data.block.first_insn,
3756 dont_jump_in);
3759 /* Mark the beginning and end of the scope if requested.
3760 We do this now, after running cleanups on the variables
3761 just going out of scope, so they are in scope for their cleanups.  */
3763 if (mark_ends)
3765 rtx note = emit_note (NOTE_INSN_BLOCK_END);
3766 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3768 else
3769 /* Get rid of the beginning-mark if we don't make an end-mark.  */
3770 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3772 /* Restore the temporary level of TARGET_EXPRs.  */
3773 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3775 /* Restore block_stack level for containing block.  */
3777 stack_block_stack = thisblock->data.block.innermost_stack_block;
3778 POPSTACK (block_stack);
3780 /* Pop the stack slot nesting and free any slots at this level.  */
3781 pop_temp_slots ();
3784 /* Generate code to save the stack pointer at the start of the current block
3785 and set up to restore it on exit. */
3787 void
3788 save_stack_pointer (void)
3790 struct nesting *thisblock = block_stack;
3792 if (thisblock->data.block.stack_level == 0)
3794 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3795 &thisblock->data.block.stack_level,
3796 thisblock->data.block.first_insn);
3797 stack_block_stack = thisblock;
3801 /* Generate RTL for the automatic variable declaration DECL.
3802 (Other kinds of declarations are simply ignored if seen here.) */
3804 void
3805 expand_decl (tree decl)
3807 tree type;
3809 type = TREE_TYPE (decl);
3811 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3812 type in case this node is used in a reference.  */
3813 if (TREE_CODE (decl) == CONST_DECL)
3815 DECL_MODE (decl) = TYPE_MODE (type);
3816 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3817 DECL_SIZE (decl) = TYPE_SIZE (type);
3818 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3819 return;
3822 /* Otherwise, only automatic variables need any expansion done. Static and
3823 external variables, and external functions, will be handled by
3824 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3825 nothing. PARM_DECLs are handled in `assign_parms'.  */
3826 if (TREE_CODE (decl) != VAR_DECL)
3827 return;
3829 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3830 return;
3832 /* Create the RTL representation for the variable.  */
/* Case 1: erroneous type — give it harmless RTL to avoid a crash.  */
3834 if (type == error_mark_node)
3835 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
/* Case 2: incomplete type; the size may be fixed later by the
   initializer.  */
3837 else if (DECL_SIZE (decl) == 0)
3838 /* Variable with incomplete type.  */
3840 rtx x;
3841 if (DECL_INITIAL (decl) == 0)
3842 /* Error message was already done; now avoid a crash.  */
3843 x = gen_rtx_MEM (BLKmode, const0_rtx);
3844 else
3845 /* An initializer is going to decide the size of this array.
3846 Until we know the size, represent its address with a reg.  */
3847 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3849 set_mem_attributes (x, decl, 1);
3850 SET_DECL_RTL (decl, x);
/* Case 3: scalar automatic variable that qualifies for a (pseudo)
   register.  */
3852 else if (DECL_MODE (decl) != BLKmode
3853 /* If -ffloat-store, don't put explicit float vars
3854 into regs.  */
3855 && !(flag_float_store
3856 && TREE_CODE (type) == REAL_TYPE)
3857 && ! TREE_THIS_VOLATILE (decl)
3858 && ! DECL_NONLOCAL (decl)
3859 && (DECL_REGISTER (decl) || DECL_ARTIFICIAL (decl) || optimize))
3861 /* Automatic variable that can go in a register.  */
3862 int unsignedp = TREE_UNSIGNED (type);
3863 enum machine_mode reg_mode
3864 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3866 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3868 if (!DECL_ARTIFICIAL (decl))
3869 mark_user_reg (DECL_RTL (decl));
3871 if (POINTER_TYPE_P (type))
3872 mark_reg_pointer (DECL_RTL (decl),
3873 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3875 maybe_set_unchanging (DECL_RTL (decl), decl);
3877 /* If something wants our address, try to use ADDRESSOF.  */
3878 if (TREE_ADDRESSABLE (decl))
3879 put_var_into_stack (decl, /*rescan=*/false);
/* Case 4: fixed-size stack variable (unless stack checking rules it
   out for being too large).  */
3882 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3883 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3884 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3885 STACK_CHECK_MAX_VAR_SIZE)))
3887 /* Variable of fixed size that goes on the stack.  */
3888 rtx oldaddr = 0;
3889 rtx addr;
3890 rtx x;
3892 /* If we previously made RTL for this decl, it must be an array
3893 whose size was determined by the initializer.
3894 The old address was a register; set that register now
3895 to the proper address.  */
3896 if (DECL_RTL_SET_P (decl))
3898 if (GET_CODE (DECL_RTL (decl)) != MEM
3899 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3900 abort ();
3901 oldaddr = XEXP (DECL_RTL (decl), 0);
3904 /* Set alignment we actually gave this decl.  */
3905 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3906 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3907 DECL_USER_ALIGN (decl) = 0;
3909 x = assign_temp (decl, 1, 1, 1);
3910 set_mem_attributes (x, decl, 1);
3911 SET_DECL_RTL (decl, x);
3913 if (oldaddr)
/* Point the placeholder address register at the real slot.  */
3915 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3916 if (addr != oldaddr)
3917 emit_move_insn (oldaddr, addr);
/* Case 5: dynamic-size variable — allocate stack space at run time.  */
3920 else
3921 /* Dynamic-size object: must push space on the stack.  */
3923 rtx address, size, x;
3925 /* Record the stack pointer on entry to block, if have
3926 not already done so.  */
3927 do_pending_stack_adjust ();
3928 save_stack_pointer ();
3930 /* In function-at-a-time mode, variable_size doesn't expand this,
3931 so do it now.  */
3932 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3933 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3934 const0_rtx, VOIDmode, 0);
3936 /* Compute the variable's size, in bytes.  */
3937 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3938 free_temp_slots ();
3940 /* Allocate space on the stack for the variable. Note that
3941 DECL_ALIGN says how the variable is to be aligned and we
3942 cannot use it to conclude anything about the alignment of
3943 the size.  */
3944 address = allocate_dynamic_stack_space (size, NULL_RTX,
3945 TYPE_ALIGN (TREE_TYPE (decl)));
3947 /* Reference the variable indirect through that rtx.  */
3948 x = gen_rtx_MEM (DECL_MODE (decl), address);
3949 set_mem_attributes (x, decl, 1);
3950 SET_DECL_RTL (decl, x);
3953 /* Indicate the alignment we actually gave this variable.  */
3954 #ifdef STACK_BOUNDARY
3955 DECL_ALIGN (decl) = STACK_BOUNDARY;
3956 #else
3957 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3958 #endif
3959 DECL_USER_ALIGN (decl) = 0;
3963 /* Emit code to perform the initialization of a declaration DECL. */
3965 void
3966 expand_decl_init (tree decl)
3968 int was_used = TREE_USED (decl);
3970 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
3971 for static decls.  */
3972 if (TREE_CODE (decl) == CONST_DECL
3973 || TREE_STATIC (decl))
3974 return;
3976 /* Compute and store the initial value now.  */
3978 push_temp_slots ();
/* An erroneous initializer: store zero for scalar-like types so later
   code does not read garbage; aggregates get nothing.  */
3980 if (DECL_INITIAL (decl) == error_mark_node)
3982 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3984 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3985 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3986 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3987 0, 0);
3988 emit_queue ();
/* A real (non-TREE_LIST) initializer: emit the assignment, noting the
   decl's source location for debugging.  */
3990 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3992 emit_line_note (DECL_SOURCE_LOCATION (decl));
3993 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3994 emit_queue ();
3997 /* Don't let the initialization count as "using" the variable.  */
3998 TREE_USED (decl) = was_used;
4000 /* Free any temporaries we made while initializing the decl.  */
4001 preserve_temp_slots (NULL_RTX);
4002 free_temp_slots ();
4003 pop_temp_slots ();
4006 /* CLEANUP is an expression to be executed at exit from this binding contour;
4007 for example, in C++, it might call the destructor for this variable.
4009 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4010 CLEANUP multiple times, and have the correct semantics. This
4011 happens in exception handling, for gotos, returns, breaks that
4012 leave the current scope.
4014 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4015 that is not associated with any particular variable. */
4018 expand_decl_cleanup (tree decl, tree cleanup)
4020 struct nesting *thisblock;
4022 /* Error if we are not in any block.  */
4023 if (cfun == 0 || block_stack == 0)
4024 return 0;
4026 thisblock = block_stack;
4028 /* Record the cleanup if there is one.  */
4030 if (cleanup != 0)
4032 tree t;
4033 rtx seq;
4034 tree *cleanups = &thisblock->data.block.cleanups;
4035 int cond_context = conditional_context ();
/* Inside a conditional, guard the cleanup with a runtime flag: the
   flag starts at 0 (set unconditionally, before the conditional
   region) and is set to 1 only if control actually reaches this
   point, so the cleanup runs only when the object was created.  */
4037 if (cond_context)
4039 rtx flag = gen_reg_rtx (word_mode);
4040 rtx set_flag_0;
4041 tree cond;
4043 start_sequence ();
4044 emit_move_insn (flag, const0_rtx);
4045 set_flag_0 = get_insns ();
4046 end_sequence ();
4048 thisblock->data.block.last_unconditional_cleanup
4049 = emit_insn_after (set_flag_0,
4050 thisblock->data.block.last_unconditional_cleanup);
4052 emit_move_insn (flag, const1_rtx);
/* Wrap the flag register in a VAR_DECL so it can appear in trees.  */
4054 cond = build_decl (VAR_DECL, NULL_TREE,
4055 (*lang_hooks.types.type_for_mode) (word_mode, 1));
4056 SET_DECL_RTL (cond, flag);
4058 /* Conditionalize the cleanup.  */
4059 cleanup = build (COND_EXPR, void_type_node,
4060 (*lang_hooks.truthvalue_conversion) (cond),
4061 cleanup, integer_zero_node);
4062 cleanup = fold (cleanup);
4064 cleanups = &thisblock->data.block.cleanups;
/* Wrap in UNSAVE_EXPR so the cleanup can be expanded more than once
   (see the header comment above).  */
4067 cleanup = unsave_expr (cleanup);
4069 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4071 if (! cond_context)
4072 /* If this block has a cleanup, it belongs in stack_block_stack.  */
4073 stack_block_stack = thisblock;
4075 if (cond_context)
4077 start_sequence ();
4080 if (! using_eh_for_cleanups_p)
4081 TREE_ADDRESSABLE (t) = 1;
4082 else
4083 expand_eh_region_start ();
/* In a conditional context, move whatever the EH-region start
   emitted to just after the last unconditional cleanup, so it is
   not itself conditional.  */
4085 if (cond_context)
4087 seq = get_insns ();
4088 end_sequence ();
4089 if (seq)
4090 thisblock->data.block.last_unconditional_cleanup
4091 = emit_insn_after (seq,
4092 thisblock->data.block.last_unconditional_cleanup);
4094 else
4096 thisblock->data.block.last_unconditional_cleanup
4097 = get_last_insn ();
4098 /* When we insert instructions after the last unconditional cleanup,
4099 we don't adjust last_insn. That means that a later add_insn will
4100 clobber the instructions we've just added. The easiest way to
4101 fix this is to just insert another instruction here, so that the
4102 instructions inserted after the last unconditional cleanup are
4103 never the last instruction.  */
4104 emit_note (NOTE_INSN_DELETED);
4107 return 1;
4110 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
4111 is thrown. */
4114 expand_decl_cleanup_eh (tree decl, tree cleanup, int eh_only)
4116 int ret = expand_decl_cleanup (decl, cleanup);
4117 if (cleanup && ret)
4119 tree node = block_stack->data.block.cleanups;
4120 CLEANUP_EH_ONLY (node) = eh_only;
4122 return ret;
/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
   DECL_ELTS is the list of elements that belong to DECL's type.
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */

void
expand_anon_union_decl (tree decl, tree cleanup, tree decl_elts)
{
  struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
  rtx x;
  tree t;

  /* If any of the elements are addressable, so is the entire union.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_VALUE (t)))
      {
	TREE_ADDRESSABLE (decl) = 1;
	break;
      }

  expand_decl (decl);
  expand_decl_cleanup (decl, cleanup);
  x = DECL_RTL (decl);

  /* Go through the elements, assigning RTL to each.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    {
      tree decl_elt = TREE_VALUE (t);
      tree cleanup_elt = TREE_PURPOSE (t);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));

      /* If any of the elements are used, so is the entire union.  */
      if (TREE_USED (decl_elt))
	TREE_USED (decl) = 1;

      /* Propagate the union's alignment to the elements.  */
      DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
      DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);

      /* If the element has BLKmode and the union doesn't, the union is
	 aligned such that the element doesn't need to have BLKmode, so
	 change the element's mode to the appropriate one for its size.  */
      if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
	DECL_MODE (decl_elt) = mode
	  = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);

      /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
	 instead create a new MEM rtx with the proper mode.  */
      if (GET_CODE (x) == MEM)
	{
	  if (mode == GET_MODE (x))
	    SET_DECL_RTL (decl_elt, x);
	  else
	    SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
	}
      else if (GET_CODE (x) == REG)
	{
	  if (mode == GET_MODE (x))
	    SET_DECL_RTL (decl_elt, x);
	  else
	    SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
	}
      else
	abort ();

      /* Record the cleanup if there is one.  */

      if (cleanup != 0)
	thisblock->data.block.cleanups
	  = tree_cons (decl_elt, cleanup_elt,
		       thisblock->data.block.cleanups);
    }
}
/* Expand a list of cleanups LIST.
   Elements may be expressions or may be nested lists.

   If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
   goto and handle protection regions specially in that case.

   If REACHABLE, we emit code, otherwise just inform the exception handling
   code about this finalization.  */

static void
expand_cleanups (tree list, int in_fixup, int reachable)
{
  tree tail;
  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      /* Nested list: recurse with the same fixup/reachability context.  */
      expand_cleanups (TREE_VALUE (tail), in_fixup, reachable);
    else
      {
	if (! in_fixup && using_eh_for_cleanups_p)
	  expand_eh_region_end_cleanup (TREE_VALUE (tail));

	if (reachable && !CLEANUP_EH_ONLY (tail))
	  {
	    /* Cleanups may be run multiple times.  For example,
	       when exiting a binding contour, we expand the
	       cleanups associated with that contour.  When a goto
	       within that binding contour has a target outside that
	       contour, it will expand all cleanups from its scope to
	       the target.  Though the cleanups are expanded multiple
	       times, the control paths are non-overlapping so the
	       cleanups will not be executed twice.  */

	    /* We may need to protect from outer cleanups.  */
	    if (in_fixup && using_eh_for_cleanups_p)
	      {
		expand_eh_region_start ();

		expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);

		expand_eh_region_end_fixup (TREE_VALUE (tail));
	      }
	    else
	      expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);

	    free_temp_slots ();
	  }
      }
}
4248 /* Mark when the context we are emitting RTL for as a conditional
4249 context, so that any cleanup actions we register with
4250 expand_decl_init will be properly conditionalized when those
4251 cleanup actions are later performed. Must be called before any
4252 expression (tree) is expanded that is within a conditional context. */
4254 void
4255 start_cleanup_deferral (void)
4257 /* block_stack can be NULL if we are inside the parameter list. It is
4258 OK to do nothing, because cleanups aren't possible here. */
4259 if (block_stack)
4260 ++block_stack->data.block.conditional_code;
4263 /* Mark the end of a conditional region of code. Because cleanup
4264 deferrals may be nested, we may still be in a conditional region
4265 after we end the currently deferred cleanups, only after we end all
4266 deferred cleanups, are we back in unconditional code. */
4268 void
4269 end_cleanup_deferral (void)
4271 /* block_stack can be NULL if we are inside the parameter list. It is
4272 OK to do nothing, because cleanups aren't possible here. */
4273 if (block_stack)
4274 --block_stack->data.block.conditional_code;
4277 tree
4278 last_cleanup_this_contour (void)
4280 if (block_stack == 0)
4281 return 0;
4283 return block_stack->data.block.cleanups;
4286 /* Return 1 if there are any pending cleanups at this point.
4287 Check the current contour as well as contours that enclose
4288 the current contour. */
4291 any_pending_cleanups (void)
4293 struct nesting *block;
4295 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4296 return 0;
4298 if (block_stack->data.block.cleanups != NULL)
4299 return 1;
4301 if (block_stack->data.block.outer_cleanups == 0)
4302 return 0;
4304 for (block = block_stack->next; block; block = block->next)
4305 if (block->data.block.cleanups != 0)
4306 return 1;
4308 return 0;
/* Enter a case (Pascal) or switch (C) statement.
   Push a block onto case_stack and nesting_stack
   to accumulate the case-labels that are seen
   and to record the labels generated for the statement.

   EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
   Otherwise, this construct is transparent for `exit_something'.

   EXPR is the index-expression to be dispatched on.
   TYPE is its nominal type.  We could simply convert EXPR to this type,
   but instead we take short cuts.

   PRINTNAME is the language-level name of the construct, used only
   for diagnostics (see check_seenlabel).  */

void
expand_start_case (int exit_flag, tree expr, tree type,
		   const char *printname)
{
  struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the case we are entering.  */

  thiscase->desc = CASE_NESTING;
  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.index_expr = expr;
  thiscase->data.case_stmt.nominal_type = type;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.printname = printname;
  thiscase->data.case_stmt.line_number_status = force_line_numbers ();
  case_stack = thiscase;
  nesting_stack = thiscase;

  do_pending_stack_adjust ();
  emit_queue ();

  /* Make sure case_stmt.start points to something that won't
     need any transformation before expand_end_case.  */
  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NOTE_INSN_DELETED);

  thiscase->data.case_stmt.start = get_last_insn ();

  /* Case labels are emitted in conditional context.  */
  start_cleanup_deferral ();
}
/* Start a "dummy case statement" within which case labels are invalid
   and are not connected to any larger real case statement.
   This can be used if you don't want to let a case statement jump
   into the middle of certain kinds of constructs.  */

void
expand_start_case_dummy (void)
{
  struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the dummy.  Note that
     case_stmt.start is left 0, which is what pushcase tests to
     reject labels inside a dummy.  */

  thiscase->desc = CASE_NESTING;
  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.start = 0;
  thiscase->data.case_stmt.nominal_type = 0;
  thiscase->data.case_stmt.default_label = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;
  start_cleanup_deferral ();
}
/* Called when the first case label of the current case statement is
   seen.  If any insns were emitted between the start of the statement
   and the first label, warn about unreachable code, using the source
   location recorded in the nearest preceding line-number note.  */

static void
check_seenlabel (void)
{
  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.line_number_status >= 0)
    {
      rtx insn;

      restore_line_number_status
	(case_stack->data.case_stmt.line_number_status);
      case_stack->data.case_stmt.line_number_status = -1;

      for (insn = case_stack->data.case_stmt.start;
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    break;
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	    {
	      /* Scan backwards for the line-number note describing the
		 unreachable code so the warning has a location.  */
	      do
		insn = PREV_INSN (insn);
	      while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));

	      /* If insn is zero, then there must have been a syntax error.  */
	      if (insn)
		{
		  location_t locus;
		  locus.file = NOTE_SOURCE_FILE (insn);
		  locus.line = NOTE_LINE_NUMBER (insn);
		  warning ("%Hunreachable code at beginning of %s", &locus,
			   case_stack->data.case_stmt.printname);
		}
	      break;
	    }
	}
    }
}
/* Accumulate one case or default label inside a case or switch statement.
   VALUE is the value of the case (a null pointer, for a default label).
   The function CONVERTER, when applied to arguments T and V,
   converts the value V to the type T.

   If not currently inside a case or switch statement, return 1 and do
   nothing.  The caller will print a language-specific error message.
   If VALUE is a duplicate or overlaps, return 2 and do nothing
   except store the (first) duplicate node in *DUPLICATE.
   If VALUE is out of range, return 3 and do nothing.
   If we are jumping into the scope of a cleanup or var-sized array, return 5.
   Return 0 on success.

   Extended to handle range statements.  */

int
pushcase (tree value, tree (*converter) (tree, tree), tree label,
	  tree *duplicate)
{
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  /* A deeper stack_block_stack means the label would jump into the
     scope of a cleanup or variable-sized array (return code 5).  */
  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* Convert VALUE to the type in which the comparisons are nominally done.  */
  if (value != 0)
    value = (*converter) (nominal_type, value);

  check_seenlabel ();

  /* Fail if this value is out of range for the actual type of the index
     (which may be narrower than NOMINAL_TYPE).  */
  if (value != 0
      && (TREE_CONSTANT_OVERFLOW (value)
	  || ! int_fits_type_p (value, index_type)))
    return 3;

  return add_case_node (value, value, label, duplicate);
}
/* Like pushcase but this case applies to all values between VALUE1 and
   VALUE2 (inclusive).  If VALUE1 is NULL, the range starts at the lowest
   value of the index type and ends at VALUE2.  If VALUE2 is NULL, the range
   starts at VALUE1 and ends at the highest value of the index type.
   If both are NULL, this case applies to all values.

   The return value is the same as that of pushcase but there is one
   additional error code: 4 means the specified range was empty.  */

int
pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
		tree label, tree *duplicate)
{
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  /* A deeper stack_block_stack means the label would jump into the
     scope of a cleanup or variable-sized array (return code 5).  */
  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  check_seenlabel ();

  /* Convert VALUEs to type in which the comparisons are nominally done
     and replace any unspecified value with the corresponding bound.
     Note TYPE_MIN_VALUE/TYPE_MAX_VALUE may be 0 for unbounded types.  */
  if (value1 == 0)
    value1 = TYPE_MIN_VALUE (index_type);
  if (value2 == 0)
    value2 = TYPE_MAX_VALUE (index_type);

  /* Fail if the range is empty.  Do this before any conversion since
     we want to allow out-of-range empty ranges.  */
  if (value2 != 0 && tree_int_cst_lt (value2, value1))
    return 4;

  /* If the max was unbounded, use the max of the nominal_type we are
     converting to.  Do this after the < check above to suppress false
     positives.  */
  if (value2 == 0)
    value2 = TYPE_MAX_VALUE (nominal_type);

  value1 = (*converter) (nominal_type, value1);
  value2 = (*converter) (nominal_type, value2);

  /* Fail if these values are out of range.  */
  if (TREE_CONSTANT_OVERFLOW (value1)
      || ! int_fits_type_p (value1, index_type))
    return 3;

  if (TREE_CONSTANT_OVERFLOW (value2)
      || ! int_fits_type_p (value2, index_type))
    return 3;

  return add_case_node (value1, value2, label, duplicate);
}
/* Do the actual insertion of a case label for pushcase and pushcase_range
   into case_stack->data.case_stmt.case_list.  Use an AVL tree to avoid
   slowdown for large switch statements.

   LOW and HIGH bound the case range (HIGH may be 0 for a single value,
   both 0 for the default label).  Returns the same codes as pushcase:
   0 on success, 2 with *DUPLICATE set on overlap/duplicate.  */

static int
add_case_node (tree low, tree high, tree label, tree *duplicate)
{
  struct case_node *p, **q, *r;

  /* If there's no HIGH value, then this is not a case range; it's
     just a simple case label.  But that's just a degenerate case
     range.  */
  if (!high)
    high = low;

  /* Handle default labels specially.  */
  if (!high && !low)
    {
      if (case_stack->data.case_stmt.default_label != 0)
	{
	  *duplicate = case_stack->data.case_stmt.default_label;
	  return 2;
	}
      case_stack->data.case_stmt.default_label = label;
      expand_label (label);
      return 0;
    }

  /* Descend the tree; Q tracks the link to update, P the parent.  */
  q = &case_stack->data.case_stmt.case_list;
  p = *q;

  while ((r = *q))
    {
      p = r;

      /* Keep going past elements distinctly greater than HIGH.  */
      if (tree_int_cst_lt (high, p->low))
	q = &p->left;

      /* or distinctly less than LOW.  */
      else if (tree_int_cst_lt (p->high, low))
	q = &p->right;

      else
	{
	  /* We have an overlap; this is an error.  */
	  *duplicate = p->code_label;
	  return 2;
	}
    }

  /* Add this label to the chain, and succeed.  */

  r = (struct case_node *) ggc_alloc (sizeof (struct case_node));
  r->low = low;

  /* If the bounds are equal, turn this into the one-value case.  */
  if (tree_int_cst_equal (low, high))
    r->high = r->low;
  else
    r->high = high;

  r->code_label = label;
  expand_label (label);

  *q = r;
  r->parent = p;
  r->left = 0;
  r->right = 0;
  r->balance = 0;

  /* Walk back up from the inserted node R, updating balance factors
     and rotating where an ancestor has become unbalanced.  This is
     standard AVL insertion rebalancing; at most one (possibly double)
     rotation is performed.  */
  while (p)
    {
      struct case_node *s;

      if (r == p->left)
	{
	  int b;

	  if (! (b = p->balance))
	    /* Growth propagation from left side.  */
	    p->balance = -1;
	  else if (b < 0)
	    {
	      if (r->balance < 0)
		{
		  /* R-Rotation */
		  if ((p->left = s = r->right))
		    s->parent = p;

		  r->right = p;
		  p->balance = 0;
		  r->balance = 0;
		  s = p->parent;
		  p->parent = r;

		  if ((r->parent = s))
		    {
		      if (s->left == p)
			s->left = r;
		      else
			s->right = r;
		    }
		  else
		    case_stack->data.case_stmt.case_list = r;
		}
	      else
		/* r->balance == +1 */
		{
		  /* LR-Rotation */

		  int b2;
		  struct case_node *t = r->right;

		  if ((p->left = s = t->right))
		    s->parent = p;

		  t->right = p;
		  if ((r->right = s = t->left))
		    s->parent = r;

		  t->left = r;
		  b = t->balance;
		  b2 = b < 0;
		  p->balance = b2;
		  b2 = -b2 - b;
		  r->balance = b2;
		  t->balance = 0;
		  s = p->parent;
		  p->parent = t;
		  r->parent = t;

		  if ((t->parent = s))
		    {
		      if (s->left == p)
			s->left = t;
		      else
			s->right = t;
		    }
		  else
		    case_stack->data.case_stmt.case_list = t;
		}
	      break;
	    }
	  else
	    {
	      /* p->balance == +1; growth of left side balances the node.  */
	      p->balance = 0;
	      break;
	    }
	}
      else
	/* r == p->right */
	{
	  int b;

	  if (! (b = p->balance))
	    /* Growth propagation from right side.  */
	    p->balance++;
	  else if (b > 0)
	    {
	      if (r->balance > 0)
		{
		  /* L-Rotation */

		  if ((p->right = s = r->left))
		    s->parent = p;

		  r->left = p;
		  p->balance = 0;
		  r->balance = 0;
		  s = p->parent;
		  p->parent = r;
		  if ((r->parent = s))
		    {
		      if (s->left == p)
			s->left = r;
		      else
			s->right = r;
		    }

		  else
		    case_stack->data.case_stmt.case_list = r;
		}

	      else
		/* r->balance == -1 */
		{
		  /* RL-Rotation */
		  int b2;
		  struct case_node *t = r->left;

		  if ((p->right = s = t->left))
		    s->parent = p;

		  t->left = p;

		  if ((r->left = s = t->right))
		    s->parent = r;

		  t->right = r;
		  b = t->balance;
		  b2 = b < 0;
		  r->balance = b2;
		  b2 = -b2 - b;
		  p->balance = b2;
		  t->balance = 0;
		  s = p->parent;
		  p->parent = t;
		  r->parent = t;

		  if ((t->parent = s))
		    {
		      if (s->left == p)
			s->left = t;
		      else
			s->right = t;
		    }

		  else
		    case_stack->data.case_stmt.case_list = t;
		}
	      break;
	    }
	  else
	    {
	      /* p->balance == -1; growth of right side balances the node.  */
	      p->balance = 0;
	      break;
	    }
	}

      r = p;
      p = p->parent;
    }

  return 0;
}
4783 /* Returns the number of possible values of TYPE.
4784 Returns -1 if the number is unknown, variable, or if the number does not
4785 fit in a HOST_WIDE_INT.
4786 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4787 do not increase monotonically (there may be duplicates);
4788 to 1 if the values increase monotonically, but not always by 1;
4789 otherwise sets it to 0. */
4791 HOST_WIDE_INT
4792 all_cases_count (tree type, int *sparseness)
4794 tree t;
4795 HOST_WIDE_INT count, minval, lastval;
4797 *sparseness = 0;
4799 switch (TREE_CODE (type))
4801 case BOOLEAN_TYPE:
4802 count = 2;
4803 break;
4805 case CHAR_TYPE:
4806 count = 1 << BITS_PER_UNIT;
4807 break;
4809 default:
4810 case INTEGER_TYPE:
4811 if (TYPE_MAX_VALUE (type) != 0
4812 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4813 TYPE_MIN_VALUE (type))))
4814 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4815 convert (type, integer_zero_node))))
4816 && host_integerp (t, 1))
4817 count = tree_low_cst (t, 1);
4818 else
4819 return -1;
4820 break;
4822 case ENUMERAL_TYPE:
4823 /* Don't waste time with enumeral types with huge values. */
4824 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4825 || TYPE_MAX_VALUE (type) == 0
4826 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4827 return -1;
4829 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4830 count = 0;
4832 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4834 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4836 if (*sparseness == 2 || thisval <= lastval)
4837 *sparseness = 2;
4838 else if (thisval != minval + count)
4839 *sparseness = 1;
4841 lastval = thisval;
4842 count++;
4846 return count;
/* Test / set bit INDEX in the byte-array bitmap ARRAY.  INDEX is a
   non-negative bit offset; bits are packed HOST_BITS_PER_CHAR per byte.  */
#define BITARRAY_TEST(ARRAY, INDEX) \
  ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
   & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
#define BITARRAY_SET(ARRAY, INDEX) \
  ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
   |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
/* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
   with the case values we have seen, assuming the case expression
   has the given TYPE.
   SPARSENESS is as determined by all_cases_count.

   The time needed is proportional to COUNT, unless
   SPARSENESS is 2, in which case quadratic time is needed.  */

void
mark_seen_cases (tree type, unsigned char *cases_seen, HOST_WIDE_INT count,
		 int sparseness)
{
  tree next_node_to_try = NULL_TREE;
  HOST_WIDE_INT next_node_offset = 0;

  struct case_node *n, *root = case_stack->data.case_stmt.case_list;
  /* Scratch INTEGER_CST used as a mutable cursor over case values.  */
  tree val = make_node (INTEGER_CST);

  TREE_TYPE (val) = type;
  if (! root)
    /* Do nothing.  */
    ;
  else if (sparseness == 2)
    {
      tree t;
      unsigned HOST_WIDE_INT xlo;

      /* This less efficient loop is only needed to handle
	 duplicate case values (multiple enum constants
	 with the same value).  */
      TREE_TYPE (val) = TREE_TYPE (root->low);
      for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
	   t = TREE_CHAIN (t), xlo++)
	{
	  TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
	  TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
	  n = root;
	  do
	    {
	      /* Keep going past elements distinctly greater than VAL.  */
	      if (tree_int_cst_lt (val, n->low))
		n = n->left;

	      /* or distinctly less than VAL.  */
	      else if (tree_int_cst_lt (n->high, val))
		n = n->right;

	      else
		{
		  /* We have found a matching range.  */
		  BITARRAY_SET (cases_seen, xlo);
		  break;
		}
	    }
	  while (n);
	}
    }
  else
    {
      /* Flatten the AVL tree into a sorted right-linked list so we can
	 walk the ranges in increasing order.  */
      if (root->left)
	case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);

      for (n = root; n; n = n->right)
	{
	  TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
	  TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
	  while (! tree_int_cst_lt (n->high, val))
	    {
	      /* Calculate (into xlo) the "offset" of the integer (val).
		 The element with lowest value has offset 0, the next smallest
		 element has offset 1, etc.  */

	      unsigned HOST_WIDE_INT xlo;
	      HOST_WIDE_INT xhi;
	      tree t;

	      if (sparseness && TYPE_VALUES (type) != NULL_TREE)
		{
		  /* The TYPE_VALUES will be in increasing order, so
		     starting searching where we last ended.  */
		  t = next_node_to_try;
		  xlo = next_node_offset;
		  xhi = 0;
		  for (;;)
		    {
		      if (t == NULL_TREE)
			{
			  t = TYPE_VALUES (type);
			  xlo = 0;
			}
		      if (tree_int_cst_equal (val, TREE_VALUE (t)))
			{
			  next_node_to_try = TREE_CHAIN (t);
			  next_node_offset = xlo + 1;
			  break;
			}
		      xlo++;
		      t = TREE_CHAIN (t);
		      /* Wrapped all the way around without a match.  */
		      if (t == next_node_to_try)
			{
			  xlo = -1;
			  break;
			}
		    }
		}
	      else
		{
		  /* Offset is VAL - TYPE_MIN_VALUE, computed with
		     double-word arithmetic as VAL + (-MIN).  */
		  t = TYPE_MIN_VALUE (type);
		  if (t)
		    neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
				&xlo, &xhi);
		  else
		    xlo = xhi = 0;
		  add_double (xlo, xhi,
			      TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
			      &xlo, &xhi);
		}

	      if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
		BITARRAY_SET (cases_seen, xlo);

	      /* Advance VAL by one to cover the whole range N.  */
	      add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
			  1, 0,
			  &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
	    }
	}
    }
}
/* Given a switch statement with an expression that is an enumeration
   type, warn if any of the enumeration type's literals are not
   covered by the case expressions of the switch.  Also, warn if there
   are any extra switch cases that are *not* elements of the
   enumerated type.

   Historical note:

   At one stage this function would: ``If all enumeration literals
   were covered by the case expressions, turn one of the expressions
   into the default expression since it should not be possible to fall
   through such a switch.''

   That code has since been removed as: ``This optimization is
   disabled because it causes valid programs to fail.  ANSI C does not
   guarantee that an expression with enum type will have a value that
   is the same as one of the enumeration literals.''  */

void
check_for_full_enumeration_handling (tree type)
{
  struct case_node *n;
  tree chain;

  /* True iff the selector type is a numbered set mode.  */
  int sparseness = 0;

  /* The number of possible selector values.  */
  HOST_WIDE_INT size;

  /* For each possible selector value.  a one iff it has been matched
     by a case value alternative.  */
  unsigned char *cases_seen;

  /* The allocated size of cases_seen, in chars.  */
  HOST_WIDE_INT bytes_needed;

  size = all_cases_count (type, &sparseness);
  bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;

  if (size > 0 && size < 600000
      /* We deliberately use calloc here, not cmalloc, so that we can suppress
	 this optimization if we don't have enough memory rather than
	 aborting, as xmalloc would do.  */
      && (cases_seen =
	  (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
    {
      HOST_WIDE_INT i;
      tree v = TYPE_VALUES (type);

      /* The time complexity of this code is normally O(N), where
	 N being the number of members in the enumerated type.
	 However, if type is an ENUMERAL_TYPE whose values do not
	 increase monotonically, O(N*log(N)) time may be needed.  */

      mark_seen_cases (type, cases_seen, size, sparseness);

      for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
	if (BITARRAY_TEST (cases_seen, i) == 0)
	  warning ("enumeration value `%s' not handled in switch",
		   IDENTIFIER_POINTER (TREE_PURPOSE (v)));

      free (cases_seen);
    }

  /* Now we go the other way around; we warn if there are case
     expressions that don't correspond to enumerators.  This can
     occur since C and C++ don't enforce type-checking of
     assignments to enumeration variables.  */

  /* Flatten the case tree into a sorted list before scanning.  */
  if (case_stack->data.case_stmt.case_list
      && case_stack->data.case_stmt.case_list->left)
    case_stack->data.case_stmt.case_list
      = case_tree2list (case_stack->data.case_stmt.case_list, 0);
  for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
    {
      /* Look for an enumerator equal to the low bound of this case.  */
      for (chain = TYPE_VALUES (type);
	   chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
	   chain = TREE_CHAIN (chain))
	;

      if (!chain)
	{
	  if (TYPE_NAME (type) == 0)
	    warning ("case value `%ld' not in enumerated type",
		     (long) TREE_INT_CST_LOW (n->low));
	  else
	    warning ("case value `%ld' not in enumerated type `%s'",
		     (long) TREE_INT_CST_LOW (n->low),
		     IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
					  == IDENTIFIER_NODE)
					 ? TYPE_NAME (type)
					 : DECL_NAME (TYPE_NAME (type))));
	}
      /* For a range, also check the high bound.  */
      if (!tree_int_cst_equal (n->low, n->high))
	{
	  for (chain = TYPE_VALUES (type);
	       chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
	       chain = TREE_CHAIN (chain))
	    ;

	  if (!chain)
	    {
	      if (TYPE_NAME (type) == 0)
		warning ("case value `%ld' not in enumerated type",
			 (long) TREE_INT_CST_LOW (n->high));
	      else
		warning ("case value `%ld' not in enumerated type `%s'",
			 (long) TREE_INT_CST_LOW (n->high),
			 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
					      == IDENTIFIER_NODE)
					     ? TYPE_NAME (type)
					     : DECL_NAME (TYPE_NAME (type))));
	    }
	}
    }
}
/* Maximum number of case bit tests.  */
#define MAX_CASE_BIT_TESTS  3

/* By default, enable case bit tests on targets with ashlsi3.  */
#ifndef CASE_USE_BIT_TESTS
#define CASE_USE_BIT_TESTS  (ashl_optab->handlers[word_mode].insn_code \
			     != CODE_FOR_nothing)
#endif


/* A case_bit_test represents a set of case nodes that may be
   selected from using a bit-wise comparison.  HI and LO hold
   the integer to be tested against, LABEL contains the label
   to jump to upon success and BITS counts the number of case
   nodes handled by this test, typically the number of bits
   set in HI:LO.  */

struct case_bit_test
{
  HOST_WIDE_INT hi;	/* High word of the constant bit mask.  */
  HOST_WIDE_INT lo;	/* Low word of the constant bit mask.  */
  rtx label;		/* Jump target when a mask bit matches.  */
  int bits;		/* Number of case nodes covered by this test.  */
};
5129 /* Determine whether "1 << x" is relatively cheap in word_mode. */
5131 static
5132 bool lshift_cheap_p (void)
5134 static bool init = false;
5135 static bool cheap = true;
5137 if (!init)
5139 rtx reg = gen_rtx_REG (word_mode, 10000);
5140 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
5141 cheap = cost < COSTS_N_INSNS (3);
5142 init = true;
5145 return cheap;
5148 /* Comparison function for qsort to order bit tests by decreasing
5149 number of case nodes, i.e. the node with the most cases gets
5150 tested first. */
5152 static
5153 int case_bit_test_cmp (const void *p1, const void *p2)
5155 const struct case_bit_test *d1 = p1;
5156 const struct case_bit_test *d2 = p2;
5158 return d2->bits - d1->bits;
5161 /* Expand a switch statement by a short sequence of bit-wise
5162 comparisons. "switch(x)" is effectively converted into
5163 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
5164 integer constants.
5166 INDEX_EXPR is the value being switched on, which is of
5167 type INDEX_TYPE. MINVAL is the lowest case value of in
5168 the case nodes, of INDEX_TYPE type, and RANGE is highest
5169 value minus MINVAL, also of type INDEX_TYPE. NODES is
5170 the set of case nodes, and DEFAULT_LABEL is the label to
5171 branch to should none of the cases match.
5173 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
5174 node targets. */
5176 static void
5177 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
5178 tree range, case_node_ptr nodes, rtx default_label)
5180 struct case_bit_test test[MAX_CASE_BIT_TESTS];
5181 enum machine_mode mode;
5182 rtx expr, index, label;
5183 unsigned int i,j,lo,hi;
5184 struct case_node *n;
5185 unsigned int count;
5187 count = 0;
5188 for (n = nodes; n; n = n->right)
5190 label = label_rtx (n->code_label);
5191 for (i = 0; i < count; i++)
5192 if (same_case_target_p (label, test[i].label))
5193 break;
5195 if (i == count)
5197 if (count >= MAX_CASE_BIT_TESTS)
5198 abort ();
5199 test[i].hi = 0;
5200 test[i].lo = 0;
5201 test[i].label = label;
5202 test[i].bits = 1;
5203 count++;
5205 else
5206 test[i].bits++;
5208 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5209 n->low, minval)), 1);
5210 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5211 n->high, minval)), 1);
5212 for (j = lo; j <= hi; j++)
5213 if (j >= HOST_BITS_PER_WIDE_INT)
5214 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
5215 else
5216 test[i].lo |= (HOST_WIDE_INT) 1 << j;
5219 qsort (test, count, sizeof(*test), case_bit_test_cmp);
5221 index_expr = fold (build (MINUS_EXPR, index_type,
5222 convert (index_type, index_expr),
5223 convert (index_type, minval)));
5224 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5225 emit_queue ();
5226 index = protect_from_queue (index, 0);
5227 do_pending_stack_adjust ();
5229 mode = TYPE_MODE (index_type);
5230 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
5231 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
5232 default_label);
5234 index = convert_to_mode (word_mode, index, 0);
5235 index = expand_binop (word_mode, ashl_optab, const1_rtx,
5236 index, NULL_RTX, 1, OPTAB_WIDEN);
5238 for (i = 0; i < count; i++)
5240 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
5241 expr = expand_binop (word_mode, and_optab, index, expr,
5242 NULL_RTX, 1, OPTAB_WIDEN);
5243 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
5244 word_mode, 1, test[i].label);
5247 emit_jump (default_label);
/* Terminate a case (Pascal) or switch (C) statement
   in which ORIG_INDEX is the expression to be tested.
   If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
   type as given in the source before any compiler conversions.
   Generate the code to test it and jump to the right place.

   Dispatch strategy (chosen below, in order): a short sequence of
   bit tests, a binary decision tree of compare-and-branch insns, or
   a jump table.  */

void
expand_end_case_type (tree orig_index, tree orig_type)
{
  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
  rtx default_label = 0;
  struct case_node *n, *m;
  unsigned int count, uniq;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  int i;
  rtx before_case, end, lab;
  struct nesting *thiscase = case_stack;
  tree index_expr, index_type;
  bool exit_done = false;
  int unsignedp;

  /* Don't crash due to previous errors.  */
  if (thiscase == NULL)
    return;

  index_expr = thiscase->data.case_stmt.index_expr;
  index_type = TREE_TYPE (index_expr);
  unsignedp = TREE_UNSIGNED (index_type);
  if (orig_type == NULL)
    orig_type = TREE_TYPE (orig_index);

  do_pending_stack_adjust ();

  /* This might get a spurious warning in the presence of a syntax error;
     it could be fixed by moving the call to check_seenlabel after the
     check for error_mark_node, and copying the code of check_seenlabel that
     deals with case_stack->data.case_stmt.line_number_status /
     restore_line_number_status in front of the call to end_cleanup_deferral;
     However, this might miss some useful warnings in the presence of
     non-syntax errors.  */
  check_seenlabel ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (index_type != error_mark_node)
    {
      /* If the switch expression was an enumerated type, check that
	 exactly all enumeration literals are covered by the cases.
	 The check is made when -Wswitch was specified and there is no
	 default case, or when -Wswitch-enum was specified.  */
      if (((warn_switch && !thiscase->data.case_stmt.default_label)
	   || warn_switch_enum)
	  && TREE_CODE (orig_type) == ENUMERAL_TYPE
	  && TREE_CODE (index_expr) != INTEGER_CST)
	check_for_full_enumeration_handling (orig_type);

      if (warn_switch_default && !thiscase->data.case_stmt.default_label)
	warning ("switch missing default case");

      /* If we don't have a default-label, create one here,
	 after the body of the switch.  */
      if (thiscase->data.case_stmt.default_label == 0)
	{
	  thiscase->data.case_stmt.default_label
	    = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	  /* Share the exit label if possible.  */
	  if (thiscase->exit_label)
	    {
	      SET_DECL_RTL (thiscase->data.case_stmt.default_label,
			    thiscase->exit_label);
	      exit_done = true;
	    }
	  expand_label (thiscase->data.case_stmt.default_label);
	}
      default_label = label_rtx (thiscase->data.case_stmt.default_label);

      /* Remember where the switch body ends: the dispatch code emitted
	 below is moved in front of the body by reorder_insns at the end
	 of this function.  */
      before_case = get_last_insn ();

      if (thiscase->data.case_stmt.case_list
	  && thiscase->data.case_stmt.case_list->left)
	thiscase->data.case_stmt.case_list
	  = case_tree2list (thiscase->data.case_stmt.case_list, 0);

      /* Simplify the case-list before we count it.  */
      group_case_nodes (thiscase->data.case_stmt.case_list);
      strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
				default_label);

      /* Get upper and lower bounds of case values.
	 Also convert all the case values to the index expr's data type.
	 COUNT ends up as the number of comparisons needed (ranges count
	 double); UNIQ is the number of distinct jump targets.  */

      uniq = 0;
      count = 0;
      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	{
	  /* Check low and high label values are integers.  */
	  if (TREE_CODE (n->low) != INTEGER_CST)
	    abort ();
	  if (TREE_CODE (n->high) != INTEGER_CST)
	    abort ();

	  n->low = convert (index_type, n->low);
	  n->high = convert (index_type, n->high);

	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */
	  if (count++ == 0)
	    {
	      minval = n->low;
	      maxval = n->high;
	    }
	  else
	    {
	      if (INT_CST_LT (n->low, minval))
		minval = n->low;
	      if (INT_CST_LT (maxval, n->high))
		maxval = n->high;
	    }
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;

	  /* Count the number of unique case node targets.  */
	  uniq++;
	  lab = label_rtx (n->code_label);
	  for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
	    if (same_case_target_p (label_rtx (m->code_label), lab))
	      {
		uniq--;
		break;
	      }
	}

      /* Compute span of values.  */
      if (count != 0)
	range = fold (build (MINUS_EXPR, index_type, maxval, minval));

      end_cleanup_deferral ();

      if (count == 0)
	{
	  /* No cases at all: evaluate the index for side effects and
	     jump to the default.  */
	  expand_expr (index_expr, const0_rtx, VOIDmode, 0);
	  emit_queue ();
	  emit_jump (default_label);
	}

      /* Try implementing this switch statement by a short sequence of
	 bit-wise comparisons.  However, we let the binary-tree case
	 below handle constant index expressions.  */
      else if (CASE_USE_BIT_TESTS
	       && ! TREE_CONSTANT (index_expr)
	       && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
	       && compare_tree_int (range, 0) > 0
	       && lshift_cheap_p ()
	       && ((uniq == 1 && count >= 3)
		   || (uniq == 2 && count >= 5)
		   || (uniq == 3 && count >= 6)))
	{
	  /* Optimize the case where all the case values fit in a
	     word without having to subtract MINVAL.  In this case,
	     we can optimize away the subtraction.  */
	  if (compare_tree_int (minval, 0) > 0
	      && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
	    {
	      minval = integer_zero_node;
	      range = maxval;
	    }
	  emit_case_bit_tests (index_type, index_expr, minval, range,
			       thiscase->data.case_stmt.case_list,
			       default_label);
	}

      /* If range of values is much bigger than number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */

      else if (count < case_values_threshold ()
	       || compare_tree_int (range, 10 * count) > 0
	       /* RANGE may be signed, and really large ranges will show up
		  as negative numbers.  */
	       || compare_tree_int (range, 0) < 0
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
	       || flag_pic
#endif
	       || TREE_CONSTANT (index_expr))
	{
	  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

	  /* If the index is a short or char that we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && ! have_insn_for (COMPARE, GET_MODE (index)))
	    {
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (have_insn_for (COMPARE, wider_mode))
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  emit_queue ();
	  do_pending_stack_adjust ();

	  index = protect_from_queue (index, 0);
	  if (GET_CODE (index) == MEM)
	    index = copy_to_reg (index);
	  if (GET_CODE (index) == CONST_INT
	      || TREE_CODE (index_expr) == INTEGER_CST)
	    {
	      /* Make a tree node with the proper constant value
		 if we don't already have one.  */
	      if (TREE_CODE (index_expr) != INTEGER_CST)
		{
		  index_expr
		    = build_int_2 (INTVAL (index),
				   unsignedp || INTVAL (index) >= 0 ? 0 : -1);
		  index_expr = convert (index_type, index_expr);
		}

	      /* For constant index expressions we need only
		 issue an unconditional branch to the appropriate
		 target code.  The job of removing any unreachable
		 code is left to the optimization phase if the
		 "-O" option is specified.  */
	      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
		if (! tree_int_cst_lt (index_expr, n->low)
		    && ! tree_int_cst_lt (n->high, index_expr))
		  break;

	      if (n)
		emit_jump (label_rtx (n->code_label));
	      else
		emit_jump (default_label);
	    }
	  else
	    {
	      /* If the index expression is not constant we generate
		 a binary decision tree to select the appropriate
		 target code.  This is done as follows:

		 The list of cases is rearranged into a binary tree,
		 nearly optimal assuming equal probability for each case.

		 The tree is transformed into RTL, eliminating
		 redundant test conditions at the same time.

		 If program flow could reach the end of the
		 decision tree an unconditional jump to the
		 default code is emitted.  */

	      use_cost_table
		= (TREE_CODE (orig_type) != ENUMERAL_TYPE
		   && estimate_case_costs (thiscase->data.case_stmt.case_list));
	      balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
	      emit_case_nodes (index, thiscase->data.case_stmt.case_list,
			       default_label, index_type);
	      emit_jump_if_reachable (default_label);
	    }
	}
      else
	{
	  /* Dense enough for a jump table.  */
	  table_label = gen_label_rtx ();
	  if (! try_casesi (index_type, index_expr, minval, range,
			    table_label, default_label))
	    {
	      index_type = thiscase->data.case_stmt.nominal_type;

	      /* Index jumptables from zero for suitable values of
		 minval to avoid a subtraction.  */
	      if (! optimize_size
		  && compare_tree_int (minval, 0) > 0
		  && compare_tree_int (minval, 3) < 0)
		{
		  minval = integer_zero_node;
		  range = maxval;
		}

	      if (! try_tablejump (index_type, index_expr, minval, range,
				   table_label, default_label))
		abort ();
	    }

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = tree_low_cst (range, 0) + 1;
	  labelvec = (rtx *) alloca (ncases * sizeof (rtx));
	  memset ((char *) labelvec, 0, ncases * sizeof (rtx));

	  for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	    {
	      /* Compute the low and high bounds relative to the minimum
		 value since that should fit in a HOST_WIDE_INT while the
		 actual values may not.  */
	      HOST_WIDE_INT i_low
		= tree_low_cst (fold (build (MINUS_EXPR, index_type,
					     n->low, minval)), 1);
	      HOST_WIDE_INT i_high
		= tree_low_cst (fold (build (MINUS_EXPR, index_type,
					     n->high, minval)), 1);
	      HOST_WIDE_INT i;

	      for (i = i_low; i <= i_high; i ++)
		labelvec[i]
		  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
	    }

	  /* Fill in the gaps with the default.  */
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);

	  /* Output the table.  */
	  emit_label (table_label);

	  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
	    emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
						   gen_rtx_LABEL_REF (Pmode, table_label),
						   gen_rtvec_v (ncases, labelvec),
						   const0_rtx, const0_rtx));
	  else
	    emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
					      gen_rtvec_v (ncases, labelvec)));

	  /* If the case insn drops through the table,
	     after the table we must jump to the default-label.
	     Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
	  emit_jump (default_label);
#else
	  emit_barrier ();
#endif
	}

      /* Move the dispatch code just emitted to before the switch body.  */
      before_case = NEXT_INSN (before_case);
      end = get_last_insn ();
      if (squeeze_notes (&before_case, &end))
	abort ();
      reorder_insns (before_case, end,
		     thiscase->data.case_stmt.start);
    }
  else
    end_cleanup_deferral ();

  if (thiscase->exit_label && !exit_done)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);

  free_temp_slots ();
}
5610 /* Convert the tree NODE into a list linked by the right field, with the left
5611 field zeroed. RIGHT is used for recursion; it is a list to be placed
5612 rightmost in the resulting list. */
5614 static struct case_node *
5615 case_tree2list (struct case_node *node, struct case_node *right)
5617 struct case_node *left;
5619 if (node->right)
5620 right = case_tree2list (node->right, right);
5622 node->right = right;
5623 if ((left = node->left))
5625 node->left = 0;
5626 return case_tree2list (left, node);
5629 return node;
5632 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5634 static void
5635 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
5637 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5639 if (op1 == op2)
5640 emit_jump (label);
5642 else
5643 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5644 (GET_MODE (op1) == VOIDmode
5645 ? GET_MODE (op2) : GET_MODE (op1)),
5646 unsignedp, label);
/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

	chars above space:	16
	digits:			16
	default:		12
	space, punct:		8
	tab:			4
	newline:		2
	other "\" chars:	1
	remaining chars:	0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */

static int
estimate_case_costs (case_node_ptr node)
{
  tree min_ascii = integer_minus_one_node;
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  The table is filled lazily
     and shared via the file-level cost_table_initialized flag.  */

  if (! cost_table_initialized)
    {
      cost_table_initialized = 1;

      for (i = 0; i < 128; i++)
	{
	  if (ISALNUM (i))
	    COST_TABLE (i) = 16;
	  else if (ISPUNCT (i))
	    COST_TABLE (i) = 8;
	  else if (ISCNTRL (i))
	    /* -1 marks "strange" control characters; seeing one of
	       these in a case range disqualifies the whole switch.  */
	    COST_TABLE (i) = -1;
	}

      /* Whitespace and common backslash-escape characters get explicit
	 weights, overriding the ISCNTRL marking above.  */
      COST_TABLE (' ') = 8;
      COST_TABLE ('\t') = 4;
      COST_TABLE ('\0') = 4;
      COST_TABLE ('\n') = 2;
      COST_TABLE ('\f') = 1;
      COST_TABLE ('\v') = 1;
      COST_TABLE ('\b') = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
	return 0;

      for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
	   i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
	if (COST_TABLE (i) < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}
5728 /* Determine whether two case labels branch to the same target. */
5730 static bool
5731 same_case_target_p (rtx l1, rtx l2)
5733 rtx i1, i2;
5735 if (l1 == l2)
5736 return true;
5738 i1 = next_real_insn (l1);
5739 i2 = next_real_insn (l2);
5740 if (i1 == i2)
5741 return true;
5743 if (i1 && simplejump_p (i1))
5745 l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
5748 if (i2 && simplejump_p (i2))
5750 l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
5752 return l1 == l2;
5755 /* Delete nodes that branch to the default label from a list of
5756 case nodes. Eg. case 5: default: becomes just default: */
5758 static void
5759 strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
5761 case_node_ptr ptr;
5763 while (*prev)
5765 ptr = *prev;
5766 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
5767 *prev = ptr->right;
5768 else
5769 prev = &ptr->right;
/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (case_node_ptr head)
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lab = label_rtx (node->code_label);
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  Each iteration
	 that passes all three conditions extends NODE's range to absorb
	 NP, so the loop keeps re-testing against the grown node->high.  */
      while (((np = np->right) != 0)
	     /* Do they jump to the same place?  */
	     && same_case_target_p (label_rtx (np->code_label), lab)
	     /* Are their ranges consecutive?  */
	     && tree_int_cst_equal (np->low,
				    fold (build (PLUS_EXPR,
						 TREE_TYPE (node->high),
						 node->high,
						 integer_one_node)))
	     /* An overflow is not consecutive.  (If HIGH + 1 wrapped,
		it would not compare greater than HIGH.)  */
	     && tree_int_cst_lt (node->high,
				 fold (build (PLUS_EXPR,
					      TREE_TYPE (node->high),
					      node->high,
					      integer_one_node))))
	node->high = np->high;

      /* NP is the first node after NODE which can't be grouped with it.
	 Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.
	 When use_cost_table is set, accumulate the per-character weights
	 so the split point bisects total cost rather than node count.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
	    }

	  if (use_cost_table)
	    cost += COST_TABLE (TREE_INT_CST_LOW (np->low));

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
		  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }

	  /* *NPP becomes the pivot: detach it from the list, make the
	     prefix its left branch, and recurse on both halves.  */
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
5931 /* Search the parent sections of the case node tree
5932 to see if a test for the lower bound of NODE would be redundant.
5933 INDEX_TYPE is the type of the index expression.
5935 The instructions to generate the case decision tree are
5936 output in the same order as nodes are processed so it is
5937 known that if a parent node checks the range of the current
5938 node minus one that the current node is bounded at its lower
5939 span. Thus the test would be redundant. */
5941 static int
5942 node_has_low_bound (case_node_ptr node, tree index_type)
5944 tree low_minus_one;
5945 case_node_ptr pnode;
5947 /* If the lower bound of this node is the lowest value in the index type,
5948 we need not test it. */
5950 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5951 return 1;
5953 /* If this node has a left branch, the value at the left must be less
5954 than that at this node, so it cannot be bounded at the bottom and
5955 we need not bother testing any further. */
5957 if (node->left)
5958 return 0;
5960 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5961 node->low, integer_one_node));
5963 /* If the subtraction above overflowed, we can't verify anything.
5964 Otherwise, look for a parent that tests our value - 1. */
5966 if (! tree_int_cst_lt (low_minus_one, node->low))
5967 return 0;
5969 for (pnode = node->parent; pnode; pnode = pnode->parent)
5970 if (tree_int_cst_equal (low_minus_one, pnode->high))
5971 return 1;
5973 return 0;
5976 /* Search the parent sections of the case node tree
5977 to see if a test for the upper bound of NODE would be redundant.
5978 INDEX_TYPE is the type of the index expression.
5980 The instructions to generate the case decision tree are
5981 output in the same order as nodes are processed so it is
5982 known that if a parent node checks the range of the current
5983 node plus one that the current node is bounded at its upper
5984 span. Thus the test would be redundant. */
5986 static int
5987 node_has_high_bound (case_node_ptr node, tree index_type)
5989 tree high_plus_one;
5990 case_node_ptr pnode;
5992 /* If there is no upper bound, obviously no test is needed. */
5994 if (TYPE_MAX_VALUE (index_type) == NULL)
5995 return 1;
5997 /* If the upper bound of this node is the highest value in the type
5998 of the index expression, we need not test against it. */
6000 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
6001 return 1;
6003 /* If this node has a right branch, the value at the right must be greater
6004 than that at this node, so it cannot be bounded at the top and
6005 we need not bother testing any further. */
6007 if (node->right)
6008 return 0;
6010 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
6011 node->high, integer_one_node));
6013 /* If the addition above overflowed, we can't verify anything.
6014 Otherwise, look for a parent that tests our value + 1. */
6016 if (! tree_int_cst_lt (node->high, high_plus_one))
6017 return 0;
6019 for (pnode = node->parent; pnode; pnode = pnode->parent)
6020 if (tree_int_cst_equal (high_plus_one, pnode->low))
6021 return 1;
6023 return 0;
6026 /* Search the parent sections of the
6027 case node tree to see if both tests for the upper and lower
6028 bounds of NODE would be redundant. */
6030 static int
6031 node_is_bounded (case_node_ptr node, tree index_type)
6033 return (node_has_low_bound (node, index_type)
6034 && node_has_high_bound (node, index_type));
6037 /* Emit an unconditional jump to LABEL unless it would be dead code. */
6039 static void
6040 emit_jump_if_reachable (rtx label)
6042 if (GET_CODE (get_last_insn ()) != BARRIER)
6043 emit_jump (label);
6046 /* Emit step-by-step code to select a case for the value of INDEX.
6047 The thus generated decision tree follows the form of the
6048 case-node binary tree NODE, whose nodes represent test conditions.
6049 INDEX_TYPE is the type of the index of the switch.
6051 Care is taken to prune redundant tests from the decision tree
6052 by detecting any boundary conditions already checked by
6053 emitted rtx. (See node_has_high_bound, node_has_low_bound
6054 and node_is_bounded, above.)
6056 Where the test conditions can be shown to be redundant we emit
6057 an unconditional jump to the target code. As a further
6058 optimization, the subordinates of a tree node are examined to
6059 check for bounded nodes. In this case conditional and/or
6060 unconditional jumps as a result of the boundary check for the
6061 current node are arranged to target the subordinates associated
6062 code for out of bound conditions on the current node.
6064 We can assume that when control reaches the code generated here,
6065 the index value has already been compared with the parents
6066 of this node, and determined to be on the same side of each parent
6067 as this node is. Thus, if this node tests for the value 51,
6068 and a parent tested for 52, we don't need to consider
6069 the possibility of a value greater than 51. If another parent
6070 tests for the value 50, then this node need not test anything. */
6072 static void
6073 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
6074 tree index_type)
6076 /* If INDEX has an unsigned type, we must make unsigned branches. */
6077 int unsignedp = TREE_UNSIGNED (index_type);
6078 enum machine_mode mode = GET_MODE (index);
6079 enum machine_mode imode = TYPE_MODE (index_type);
6081 /* See if our parents have already tested everything for us.
6082 If they have, emit an unconditional jump for this node. */
6083 if (node_is_bounded (node, index_type))
6084 emit_jump (label_rtx (node->code_label));
6086 else if (tree_int_cst_equal (node->low, node->high))
6088 /* Node is single valued. First see if the index expression matches
6089 this node and then check our children, if any. */
6091 do_jump_if_equal (index,
6092 convert_modes (mode, imode,
6093 expand_expr (node->low, NULL_RTX,
6094 VOIDmode, 0),
6095 unsignedp),
6096 label_rtx (node->code_label), unsignedp);
6098 if (node->right != 0 && node->left != 0)
6100 /* This node has children on both sides.
6101 Dispatch to one side or the other
6102 by comparing the index value with this node's value.
6103 If one subtree is bounded, check that one first,
6104 so we can avoid real branches in the tree. */
6106 if (node_is_bounded (node->right, index_type))
6108 emit_cmp_and_jump_insns (index,
6109 convert_modes
6110 (mode, imode,
6111 expand_expr (node->high, NULL_RTX,
6112 VOIDmode, 0),
6113 unsignedp),
6114 GT, NULL_RTX, mode, unsignedp,
6115 label_rtx (node->right->code_label));
6116 emit_case_nodes (index, node->left, default_label, index_type);
6119 else if (node_is_bounded (node->left, index_type))
6121 emit_cmp_and_jump_insns (index,
6122 convert_modes
6123 (mode, imode,
6124 expand_expr (node->high, NULL_RTX,
6125 VOIDmode, 0),
6126 unsignedp),
6127 LT, NULL_RTX, mode, unsignedp,
6128 label_rtx (node->left->code_label));
6129 emit_case_nodes (index, node->right, default_label, index_type);
6132 else
6134 /* Neither node is bounded. First distinguish the two sides;
6135 then emit the code for one side at a time. */
6137 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6139 /* See if the value is on the right. */
6140 emit_cmp_and_jump_insns (index,
6141 convert_modes
6142 (mode, imode,
6143 expand_expr (node->high, NULL_RTX,
6144 VOIDmode, 0),
6145 unsignedp),
6146 GT, NULL_RTX, mode, unsignedp,
6147 label_rtx (test_label));
6149 /* Value must be on the left.
6150 Handle the left-hand subtree. */
6151 emit_case_nodes (index, node->left, default_label, index_type);
6152 /* If left-hand subtree does nothing,
6153 go to default. */
6154 emit_jump_if_reachable (default_label);
6156 /* Code branches here for the right-hand subtree. */
6157 expand_label (test_label);
6158 emit_case_nodes (index, node->right, default_label, index_type);
6162 else if (node->right != 0 && node->left == 0)
6164 /* Here we have a right child but no left so we issue conditional
6165 branch to default and process the right child.
6167 Omit the conditional branch to default if we it avoid only one
6168 right child; it costs too much space to save so little time. */
6170 if (node->right->right || node->right->left
6171 || !tree_int_cst_equal (node->right->low, node->right->high))
6173 if (!node_has_low_bound (node, index_type))
6175 emit_cmp_and_jump_insns (index,
6176 convert_modes
6177 (mode, imode,
6178 expand_expr (node->high, NULL_RTX,
6179 VOIDmode, 0),
6180 unsignedp),
6181 LT, NULL_RTX, mode, unsignedp,
6182 default_label);
6185 emit_case_nodes (index, node->right, default_label, index_type);
6187 else
6188 /* We cannot process node->right normally
6189 since we haven't ruled out the numbers less than
6190 this node's value. So handle node->right explicitly. */
6191 do_jump_if_equal (index,
6192 convert_modes
6193 (mode, imode,
6194 expand_expr (node->right->low, NULL_RTX,
6195 VOIDmode, 0),
6196 unsignedp),
6197 label_rtx (node->right->code_label), unsignedp);
6200 else if (node->right == 0 && node->left != 0)
6202 /* Just one subtree, on the left. */
6203 if (node->left->left || node->left->right
6204 || !tree_int_cst_equal (node->left->low, node->left->high))
6206 if (!node_has_high_bound (node, index_type))
6208 emit_cmp_and_jump_insns (index,
6209 convert_modes
6210 (mode, imode,
6211 expand_expr (node->high, NULL_RTX,
6212 VOIDmode, 0),
6213 unsignedp),
6214 GT, NULL_RTX, mode, unsignedp,
6215 default_label);
6218 emit_case_nodes (index, node->left, default_label, index_type);
6220 else
6221 /* We cannot process node->left normally
6222 since we haven't ruled out the numbers less than
6223 this node's value. So handle node->left explicitly. */
6224 do_jump_if_equal (index,
6225 convert_modes
6226 (mode, imode,
6227 expand_expr (node->left->low, NULL_RTX,
6228 VOIDmode, 0),
6229 unsignedp),
6230 label_rtx (node->left->code_label), unsignedp);
6233 else
6235 /* Node is a range. These cases are very similar to those for a single
6236 value, except that we do not start by testing whether this node
6237 is the one to branch to. */
6239 if (node->right != 0 && node->left != 0)
6241 /* Node has subtrees on both sides.
6242 If the right-hand subtree is bounded,
6243 test for it first, since we can go straight there.
6244 Otherwise, we need to make a branch in the control structure,
6245 then handle the two subtrees. */
6246 tree test_label = 0;
6248 if (node_is_bounded (node->right, index_type))
6249 /* Right hand node is fully bounded so we can eliminate any
6250 testing and branch directly to the target code. */
6251 emit_cmp_and_jump_insns (index,
6252 convert_modes
6253 (mode, imode,
6254 expand_expr (node->high, NULL_RTX,
6255 VOIDmode, 0),
6256 unsignedp),
6257 GT, NULL_RTX, mode, unsignedp,
6258 label_rtx (node->right->code_label));
6259 else
6261 /* Right hand node requires testing.
6262 Branch to a label where we will handle it later. */
6264 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6265 emit_cmp_and_jump_insns (index,
6266 convert_modes
6267 (mode, imode,
6268 expand_expr (node->high, NULL_RTX,
6269 VOIDmode, 0),
6270 unsignedp),
6271 GT, NULL_RTX, mode, unsignedp,
6272 label_rtx (test_label));
6275 /* Value belongs to this node or to the left-hand subtree. */
6277 emit_cmp_and_jump_insns (index,
6278 convert_modes
6279 (mode, imode,
6280 expand_expr (node->low, NULL_RTX,
6281 VOIDmode, 0),
6282 unsignedp),
6283 GE, NULL_RTX, mode, unsignedp,
6284 label_rtx (node->code_label));
6286 /* Handle the left-hand subtree. */
6287 emit_case_nodes (index, node->left, default_label, index_type);
6289 /* If right node had to be handled later, do that now. */
6291 if (test_label)
6293 /* If the left-hand subtree fell through,
6294 don't let it fall into the right-hand subtree. */
6295 emit_jump_if_reachable (default_label);
6297 expand_label (test_label);
6298 emit_case_nodes (index, node->right, default_label, index_type);
6302 else if (node->right != 0 && node->left == 0)
6304 /* Deal with values to the left of this node,
6305 if they are possible. */
6306 if (!node_has_low_bound (node, index_type))
6308 emit_cmp_and_jump_insns (index,
6309 convert_modes
6310 (mode, imode,
6311 expand_expr (node->low, NULL_RTX,
6312 VOIDmode, 0),
6313 unsignedp),
6314 LT, NULL_RTX, mode, unsignedp,
6315 default_label);
6318 /* Value belongs to this node or to the right-hand subtree. */
6320 emit_cmp_and_jump_insns (index,
6321 convert_modes
6322 (mode, imode,
6323 expand_expr (node->high, NULL_RTX,
6324 VOIDmode, 0),
6325 unsignedp),
6326 LE, NULL_RTX, mode, unsignedp,
6327 label_rtx (node->code_label));
6329 emit_case_nodes (index, node->right, default_label, index_type);
6332 else if (node->right == 0 && node->left != 0)
6334 /* Deal with values to the right of this node,
6335 if they are possible. */
6336 if (!node_has_high_bound (node, index_type))
6338 emit_cmp_and_jump_insns (index,
6339 convert_modes
6340 (mode, imode,
6341 expand_expr (node->high, NULL_RTX,
6342 VOIDmode, 0),
6343 unsignedp),
6344 GT, NULL_RTX, mode, unsignedp,
6345 default_label);
6348 /* Value belongs to this node or to the left-hand subtree. */
6350 emit_cmp_and_jump_insns (index,
6351 convert_modes
6352 (mode, imode,
6353 expand_expr (node->low, NULL_RTX,
6354 VOIDmode, 0),
6355 unsignedp),
6356 GE, NULL_RTX, mode, unsignedp,
6357 label_rtx (node->code_label));
6359 emit_case_nodes (index, node->left, default_label, index_type);
6362 else
6364 /* Node has no children so we check low and high bounds to remove
6365 redundant tests. Only one of the bounds can exist,
6366 since otherwise this node is bounded--a case tested already. */
6367 int high_bound = node_has_high_bound (node, index_type);
6368 int low_bound = node_has_low_bound (node, index_type);
6370 if (!high_bound && low_bound)
6372 emit_cmp_and_jump_insns (index,
6373 convert_modes
6374 (mode, imode,
6375 expand_expr (node->high, NULL_RTX,
6376 VOIDmode, 0),
6377 unsignedp),
6378 GT, NULL_RTX, mode, unsignedp,
6379 default_label);
6382 else if (!low_bound && high_bound)
6384 emit_cmp_and_jump_insns (index,
6385 convert_modes
6386 (mode, imode,
6387 expand_expr (node->low, NULL_RTX,
6388 VOIDmode, 0),
6389 unsignedp),
6390 LT, NULL_RTX, mode, unsignedp,
6391 default_label);
6393 else if (!low_bound && !high_bound)
6395 /* Widen LOW and HIGH to the same width as INDEX. */
6396 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
6397 tree low = build1 (CONVERT_EXPR, type, node->low);
6398 tree high = build1 (CONVERT_EXPR, type, node->high);
6399 rtx low_rtx, new_index, new_bound;
6401 /* Instead of doing two branches, emit one unsigned branch for
6402 (index-low) > (high-low). */
6403 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6404 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6405 NULL_RTX, unsignedp,
6406 OPTAB_WIDEN);
6407 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6408 high, low)),
6409 NULL_RTX, mode, 0);
6411 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6412 mode, 1, default_label);
6415 emit_jump (label_rtx (node->code_label));
6420 #include "gt-stmt.h"