/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
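
/* For illustration only (not part of the original source): a front end
   expanding `if (cond) then_stmt;' makes calls along the lines of

	expand_start_cond (cond, 0);
	...expand THEN_STMT...
	expand_end_cond ();

   with expand_start_else in between when an else-clause is present.
   The exact conventions vary by language front end.  */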
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
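
/* Illustrative example (not part of the original source): for the GNU C

	switch (x)
	  {
	  case 1:       ...
	  case 4 ... 7: ...
	  default:      ...
	  }

   two case_node entries are built: one with low == high == 1, and one
   with low == 4, high == 7.  The default label is not a case_node; it
   is tracked separately as default_label in the enclosing case
   nesting structure below.  */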
struct case_node GTY(())
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
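
/* For example (not in the original source), COST_TABLE (-1) refers to
   cost_table_[0] and COST_TABLE (127) to cost_table_[128]; the
   129-entry table thus spans the small signed range of case values
   used when estimating case costs.  */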
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
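
/* Illustration (not part of the original source): while expanding

	while (c) { int v = f (); ... }

   the loop pushes one `struct nesting' on loop_stack, and the inner
   binding contour pushes another on block_stack; both also appear on
   nesting_stack, chained through `all'.  A C `break' in the body then
   jumps to the loop nesting object's exit_label.  */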
struct nesting GTY(())
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  enum nesting_desc {
    COND_NESTING,
    LOOP_NESTING,
    BLOCK_NESTING,
    CASE_NESTING
  } desc;
  union nesting_u
    {
      /* For conds (if-then and if-then-else statements).  */
      struct nesting_cond
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} GTY ((tag ("COND_NESTING"))) cond;
      /* For loops.  */
      struct nesting_loop
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} GTY ((tag ("LOOP_NESTING"))) loop;
      /* For variable binding contours.  */
      struct nesting_block
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int n_function_calls;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block, if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	} GTY ((tag ("BLOCK_NESTING"))) block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct nesting_case
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} GTY ((tag ("CASE_NESTING"))) case_stmt;
    } GTY ((desc ("%1.desc"))) data;
};
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) ggc_alloc (sizeof (struct nesting))
/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all; }			\
     while (this != target); } while (0)
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
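
/* Illustration (not part of the original source): in GNU C,

	{
	  int vla[n];	-- this contour resets the stack on exit
	  goto done;	-- `done' is not defined yet
	}
      done: ;

   the goto cannot know at expansion time how to restore the stack
   pointer, so expand_fixup queues an entry here; once the contour is
   exited and `done' has been defined, fixup_gotos inserts the stack
   restore and any cleanup code just before the jump.  */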
struct goto_fixup GTY(())
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain GTY(())
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
struct stmt_status GTY(())
{
  /* Chain of all pending binding contours.  */
  struct nesting * x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting * x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting * x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting * x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting * x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting * x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Filename and line number of last line-number note,
     whether we actually emitted it or not.  */
  const char *x_emit_filename;
  int x_emit_lineno;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_filename (cfun->stmt->x_emit_filename)
#define emit_lineno (cfun->stmt->x_emit_lineno)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)

/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;
static int n_occurrences		PARAMS ((int, const char *));
static bool parse_input_constraint	PARAMS ((const char **, int, int, int,
						 int, const char * const *,
						 bool *, bool *));
static bool decl_conflicts_with_clobbers_p PARAMS ((tree, const HARD_REG_SET));
static void expand_goto_internal	PARAMS ((tree, rtx, rtx));
static int expand_fixup			PARAMS ((tree, rtx, rtx));
static rtx expand_nl_handler_label	PARAMS ((rtx, rtx));
static void expand_nl_goto_receiver	PARAMS ((void));
static void expand_nl_goto_receivers	PARAMS ((struct nesting *));
static void fixup_gotos			PARAMS ((struct nesting *, rtx, tree,
						 rtx, int));
static bool check_operand_nalternatives	PARAMS ((tree, tree));
static bool check_unique_operand_names	PARAMS ((tree, tree));
static tree resolve_operand_names	PARAMS ((tree, tree, tree,
						 const char **));
static char *resolve_operand_name_1	PARAMS ((char *, tree, tree));
static void expand_null_return_1	PARAMS ((rtx));
static enum br_predictor return_prediction PARAMS ((rtx));
static void expand_value_return		PARAMS ((rtx));
static int tail_recursion_args		PARAMS ((tree, tree));
static void expand_cleanups		PARAMS ((tree, tree, int, int));
static void check_seenlabel		PARAMS ((void));
static void do_jump_if_equal		PARAMS ((rtx, rtx, rtx, int));
static int estimate_case_costs		PARAMS ((case_node_ptr));
static bool same_case_target_p		PARAMS ((rtx, rtx));
static void strip_default_case_nodes	PARAMS ((case_node_ptr *, rtx));
static void group_case_nodes		PARAMS ((case_node_ptr));
static void balance_case_nodes		PARAMS ((case_node_ptr *,
						 case_node_ptr));
static int node_has_low_bound		PARAMS ((case_node_ptr, tree));
static int node_has_high_bound		PARAMS ((case_node_ptr, tree));
static int node_is_bounded		PARAMS ((case_node_ptr, tree));
static void emit_jump_if_reachable	PARAMS ((rtx));
static void emit_case_nodes		PARAMS ((rtx, case_node_ptr, rtx, tree));
static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt_for_function ()
{
  cfun->stmt = ((struct stmt_status *)ggc_alloc (sizeof (struct stmt_status)));

  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  current_block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  clear_last_expr ();
}
/* Record the current file and line.  Called from emit_line_note.  */

void
set_file_and_line_for_stmt (file, line)
     const char *file;
     int line;
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    {
      emit_filename = file;
      emit_lineno = line;
    }
}
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (!DECL_RTL_SET_P (label))
    SET_DECL_RTL (label, gen_label_rtx ());

  return DECL_RTL (label);
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (x) != Pmode)
    x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */
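
/* Illustration (not part of the original source): with GNU C nested
   functions and local labels,

	void outer (void)
	{
	  __label__ out;
	  void inner (void) { goto out; }
	  inner ();
	 out: ;
	}

   the goto in `inner' targets a label in a containing function and so
   takes the nonlocal path below, which restores the containing
   function's frame before jumping.  */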
void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area, insn;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
	 which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
					       virtual_stack_vars_rtx,
					       static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
	save_area = replace_rtx (copy_rtx (save_area),
				 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
				      save_area, label_ref));
      else
#endif
	{
	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, static_chain);
	  emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency;
	     not clear if really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (handler_slot);
	}

      /* Search backwards to the jump insn and mark it as a
	 non-local goto.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	{
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
						  const0_rtx, REG_NOTES (insn));
	      break;
	    }
	  else if (GET_CODE (insn) == CALL_INSN)
	    break;
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();

	  /* Don't do this adjust if it's to the end label and this function
	     is to return with a depressed stack pointer.  */
	  if (label == return_label
	      && (((TREE_CODE (TREE_TYPE (current_function_decl))
		    == FUNCTION_TYPE)
		   && (TYPE_RETURNS_STACK_DEPRESSED
		       (TREE_TYPE (current_function_decl))))))
	    ;
	  else
	    emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	rtx end;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	if (!cfun->x_whole_function_mode_p)
	  (*lang_hooks.decls.insert_block) (block);
	else
	  {
	    BLOCK_CHAIN (block)
	      = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
	    BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
	      = block;
	  }

	start_sequence ();
	start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (start) = block;
	fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
	end = emit_note (NULL, NOTE_INSN_BLOCK_END);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (end) = block;
	fixup->context = block;
	end_sequence ();
	emit_insn_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  rtx cleanup_insns;

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.   This code used to use
	     the first non-label insn after f->target_rtl, but that's
	     wrong since such can be added, by things like put_var_into_stack
	     and have INSN_UIDs that are out of the range of the block.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  (*lang_hooks.decls.pushlevel) (0);
	  (*lang_hooks.decls.set_block) (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level
	      && ! (f->target_rtl == return_label
		    && ((TREE_CODE (TREE_TYPE (current_function_decl))
			 == FUNCTION_TYPE)
			&& (TYPE_RETURNS_STACK_DEPRESSED
			    (TREE_TYPE (current_function_decl))))))
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  (*lang_hooks.decls.poplevel) (1, 0, 0);

	  end_sequence ();
	  emit_insn_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      (*lang_hooks.decls.pushlevel) (0);
	      (*lang_hooks.decls.set_block) (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      (*lang_hooks.decls.poplevel) (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insn_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */
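
/* Example (not part of the original source): a statement such as

	asm volatile ("nop");

   reaches this function as a STRING_CST with VOL set, and is emitted
   as a single volatile ASM_INPUT insn with no operand processing.  */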
void
expand_asm (string, vol)
     tree string;
     int vol;
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);

  clear_last_expr ();
}
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */
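
/* Example (not part of the original source): for an operand written
   "+r", this routine canonicalizes *CONSTRAINT_P to "=r" and sets
   *IS_INOUT and *ALLOWS_REG; for "=m" it sets *ALLOWS_MEM instead.
   A constraint with no `=' or `+' at all draws an error.  */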
bool
parse_output_constraint (constraint_p, operand_num, ninputs, noutputs,
			 allows_mem, allows_reg, is_inout)
     const char **constraint_p;
     int operand_num;
     int ninputs;
     int noutputs;
     bool *allows_mem;
     bool *allows_reg;
     bool *is_inout;
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks `='");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning ("output constraint `%c' for operand %d is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains incorrectly positioned '+' or '='");
	return false;

      case '%':
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p': case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}
/* Similar, but for input constraints.  */

static bool
parse_input_constraint (constraint_p, input_num, ninputs, noutputs, ninout,
			constraints, allows_mem, allows_reg)
     const char **constraint_p;
     int input_num;
     int ninputs;
     int noutputs;
     int ninout;
     const char * const * constraints;
     bool *allows_mem;
     bool *allows_reg;
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains `%c'", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      /* ??? At the end of the loop, we will skip the first part of
		 the matched constraint.  This assumes not only that the
		 other constraint is an output constraint, but also that
		 the '=' or '+' come first.  */
	      break;
	    }
	  else
	    j = end - constraint;
	  /* Anticipate increment at end of loop.  */
	  j--;
	}
	/* Fall through.  */

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation `%c' in constraint", constraint[j]);
	    return false;
	  }
	if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
	    != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in DECL.  Emit error and return TRUE for error,
   FALSE for ok.  */
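
/* Example (not part of the original source; the register name is
   hypothetical and target-dependent):

	register int x asm ("r1");
	asm ("..." : "=r" (x) : : "r1");

   Using `x' as an asm operand while "r1" also appears in the clobber
   list triggers the error below.  */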
static bool
decl_conflicts_with_clobbers_p (decl, clobbered_regs)
     tree decl;
     const HARD_REG_SET clobbered_regs;
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
      && DECL_REGISTER (decl)
      && REG_P (DECL_RTL (decl))
      && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
    {
      rtx reg = DECL_RTL (decl);
      unsigned int regno;

      for (regno = REGNO (reg);
	   regno < (REGNO (reg)
		    + HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)));
	   regno++)
	if (TEST_HARD_REG_BIT (clobbered_regs, regno))
	  {
	    error ("asm-specifier for variable `%s' conflicts with asm clobber list",
		   IDENTIFIER_POINTER (DECL_NAME (decl)));

	    /* Reset registerness to stop multiple errors emitted for a
	       single variable.  */
	    DECL_REGISTER (decl) = 0;
	    return true;
	  }
    }

  return false;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
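
/* Illustration (not part of the original source; the variable names and
   instruction string are made up): for the GNU C

	asm volatile ("add %1,%0"
		      : "=r" (sum)
		      : "r" (inc), "0" (sum)
		      : "cc");

   OUTPUTS is a one-element list whose TREE_PURPOSE carries the
   constraint "=r"; INPUTS carries "r" and the matching constraint "0";
   CLOBBERS holds the STRING_CST "cc".  The front end builds these tree
   lists before calling this function.  */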
1466 void
1467 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1468 tree string, outputs, inputs, clobbers;
1469 int vol;
1470 const char *filename;
1471 int line;
1473 rtvec argvec, constraintvec;
1474 rtx body;
1475 int ninputs = list_length (inputs);
1476 int noutputs = list_length (outputs);
1477 int ninout;
1478 int nclobbers;
1479 HARD_REG_SET clobbered_regs;
1480 int clobber_conflict_found = 0;
1481 tree tail;
1482 int i;
1483 /* Vector of RTX's of evaluated output operands. */
1484 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1485 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1486 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1487 enum machine_mode *inout_mode
1488 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1489 const char **constraints
1490 = (const char **) alloca ((noutputs + ninputs) * sizeof (const char *));
1491 int old_generating_concat_p = generating_concat_p;
1493 /* An ASM with no outputs needs to be treated as volatile, for now. */
1494 if (noutputs == 0)
1495 vol = 1;
1497 if (! check_operand_nalternatives (outputs, inputs))
1498 return;
1500 if (! check_unique_operand_names (outputs, inputs))
1501 return;
1503 string = resolve_operand_names (string, outputs, inputs, constraints);
1505 #ifdef MD_ASM_CLOBBERS
1506 /* Sometimes we wish to automatically clobber registers across an asm.
1507 Case in point is when the i386 backend moved from cc0 to a hard reg --
1508 maintaining source-level compatibility means automatically clobbering
1509 the flags register. */
1510 MD_ASM_CLOBBERS (clobbers);
1511 #endif
1513 /* Count the number of meaningful clobbered registers, ignoring what
1514 we would ignore later. */
1515 nclobbers = 0;
1516 CLEAR_HARD_REG_SET (clobbered_regs);
1517 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1519 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1521 i = decode_reg_name (regname);
1522 if (i >= 0 || i == -4)
1523 ++nclobbers;
1524 else if (i == -2)
1525 error ("unknown register name `%s' in `asm'", regname);
1527 /* Mark clobbered registers. */
1528 if (i >= 0)
1530 /* Clobbering the PIC register is an error */
1531 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
1533 error ("PIC register `%s' clobbered in `asm'", regname);
1534 return;
1537 SET_HARD_REG_BIT (clobbered_regs, i);
1541 clear_last_expr ();
1543 /* First pass over inputs and outputs checks validity and sets
1544 mark_addressable if needed. */
1546 ninout = 0;
1547 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1549 tree val = TREE_VALUE (tail);
1550 tree type = TREE_TYPE (val);
1551 const char *constraint;
1552 bool is_inout;
1553 bool allows_reg;
1554 bool allows_mem;
1556 /* If there's an erroneous arg, emit no insn. */
1557 if (type == error_mark_node)
1558 return;
1560 /* Try to parse the output constraint. If that fails, there's
1561 no point in going further. */
1562 constraint = constraints[i];
1563 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1564 &allows_mem, &allows_reg, &is_inout))
1565 return;
1567 if (! allows_reg
1568 && (allows_mem
1569 || is_inout
1570 || (DECL_P (val)
1571 && GET_CODE (DECL_RTL (val)) == REG
1572 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
1573 (*lang_hooks.mark_addressable) (val);
1575 if (is_inout)
1576 ninout++;
1579 ninputs += ninout;
1580 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1582 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1583 return;
1586 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1588 bool allows_reg, allows_mem;
1589 const char *constraint;
1591 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1592 would get VOIDmode and that could cause a crash in reload. */
1593 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1594 return;
1596 constraint = constraints[i + noutputs];
1597 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1598 constraints, &allows_mem, &allows_reg))
1599 return;
1601 if (! allows_reg && allows_mem)
1602 (*lang_hooks.mark_addressable) (TREE_VALUE (tail));
1605 /* Second pass evaluates arguments. */
1607 ninout = 0;
1608 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1610 tree val = TREE_VALUE (tail);
1611 tree type = TREE_TYPE (val);
1612 bool is_inout;
1613 bool allows_reg;
1614 bool allows_mem;
1615 rtx op;
1617 if (!parse_output_constraint (&constraints[i], i, ninputs,
1618 noutputs, &allows_mem, &allows_reg,
1619 &is_inout))
1620 abort ();
1622 /* If an output operand is not a decl or indirect ref and our constraint
1623 allows a register, make a temporary to act as an intermediate.
1624 Make the asm insn write into that, then our caller will copy it to
1625 the real output operand. Likewise for promoted variables. */
1627 generating_concat_p = 0;
1629 real_output_rtx[i] = NULL_RTX;
1630 if ((TREE_CODE (val) == INDIRECT_REF
1631 && allows_mem)
1632 || (DECL_P (val)
1633 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1634 && ! (GET_CODE (DECL_RTL (val)) == REG
1635 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1636 || ! allows_reg
1637 || is_inout)
1639 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1640 if (GET_CODE (op) == MEM)
1641 op = validize_mem (op);
1643 if (! allows_reg && GET_CODE (op) != MEM)
1644 error ("output number %d not directly addressable", i);
1645 if ((! allows_mem && GET_CODE (op) == MEM)
1646 || GET_CODE (op) == CONCAT)
1648 real_output_rtx[i] = protect_from_queue (op, 1);
1649 op = gen_reg_rtx (GET_MODE (op));
1650 if (is_inout)
1651 emit_move_insn (op, real_output_rtx[i]);
1654 else
1656 op = assign_temp (type, 0, 0, 1);
1657 op = validize_mem (op);
1658 TREE_VALUE (tail) = make_tree (type, op);
1660 output_rtx[i] = op;
1662 generating_concat_p = old_generating_concat_p;
1664 if (is_inout)
1666 inout_mode[ninout] = TYPE_MODE (type);
1667 inout_opnum[ninout++] = i;
1670 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1671 clobber_conflict_found = 1;
1674 /* Make vectors for the expression-rtx, constraint strings,
1675 and named operands. */
1677 argvec = rtvec_alloc (ninputs);
1678 constraintvec = rtvec_alloc (ninputs);
1680 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1681 : GET_MODE (output_rtx[0])),
1682 TREE_STRING_POINTER (string),
1683 empty_string, 0, argvec, constraintvec,
1684 filename, line);
1686 MEM_VOLATILE_P (body) = vol;
1688 /* Eval the inputs and put them into ARGVEC.
1689 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1691 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1693 bool allows_reg, allows_mem;
1694 const char *constraint;
1695 tree val, type;
1696 rtx op;
1698 constraint = constraints[i + noutputs];
1699 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1700 constraints, &allows_mem, &allows_reg))
1701 abort ();
1703 generating_concat_p = 0;
1705 val = TREE_VALUE (tail);
1706 type = TREE_TYPE (val);
1707 op = expand_expr (val, NULL_RTX, VOIDmode, 0);
1709 /* Never pass a CONCAT to an ASM. */
1710 if (GET_CODE (op) == CONCAT)
1711 op = force_reg (GET_MODE (op), op);
1712 else if (GET_CODE (op) == MEM)
1713 op = validize_mem (op);
1715 if (asm_operand_ok (op, constraint) <= 0)
1717 if (allows_reg)
1718 op = force_reg (TYPE_MODE (type), op);
1719 else if (!allows_mem)
1720 warning ("asm operand %d probably doesn't match constraints",
1721 i + noutputs);
1722 else if (CONSTANT_P (op))
1724 op = force_const_mem (TYPE_MODE (type), op);
1725 op = validize_mem (op);
1727 else if (GET_CODE (op) == REG
1728 || GET_CODE (op) == SUBREG
1729 || GET_CODE (op) == ADDRESSOF
1730 || GET_CODE (op) == CONCAT)
1732 tree qual_type = build_qualified_type (type,
1733 (TYPE_QUALS (type)
1734 | TYPE_QUAL_CONST));
1735 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1736 memloc = validize_mem (memloc);
1737 emit_move_insn (memloc, op);
1738 op = memloc;
1741 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1743 /* We won't recognize volatile memory as a valid
1744 memory_operand at this point. Ignore it. */
1746 else if (queued_subexp_p (op))
1748 else
1749 /* ??? Leave this only until we have experience with what
1750 happens in combine and elsewhere when constraints are
1751 not satisfied. */
1752 warning ("asm operand %d probably doesn't match constraints",
1753 i + noutputs);
1756 generating_concat_p = old_generating_concat_p;
1757 ASM_OPERANDS_INPUT (body, i) = op;
1759 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1760 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1762 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1763 clobber_conflict_found = 1;
1766 /* Protect all the operands from the queue now that they have all been
1767 evaluated. */
1769 generating_concat_p = 0;
1771 for (i = 0; i < ninputs - ninout; i++)
1772 ASM_OPERANDS_INPUT (body, i)
1773 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1775 for (i = 0; i < noutputs; i++)
1776 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1778 /* For in-out operands, copy output rtx to input rtx. */
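   /* For example (a sketch, not in the original source): an operand
      written "+r" was split earlier into output operand j with
      constraint "=r" plus a matching input whose constraint is just
      the output's number rendered as a string, which is what the
      sprintf below generates.  */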
1779 for (i = 0; i < ninout; i++)
1781 int j = inout_opnum[i];
1782 char buffer[16];
1784 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1785 = output_rtx[j];
1787 sprintf (buffer, "%d", j);
1788 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1789 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_alloc_string (buffer, -1));
1792 generating_concat_p = old_generating_concat_p;
1794 /* Now, for each output, construct an rtx
1795 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1796 ARGVEC CONSTRAINTS OPNAMES))
1797 If there is more than one, put them inside a PARALLEL. */
1799 if (noutputs == 1 && nclobbers == 0)
1801 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1802 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1805 else if (noutputs == 0 && nclobbers == 0)
1807 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1808 emit_insn (body);
1811 else
1813 rtx obody = body;
1814 int num = noutputs;
1816 if (num == 0)
1817 num = 1;
1819 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1821 /* For each output operand, store a SET. */
1822 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1824 XVECEXP (body, 0, i)
1825 = gen_rtx_SET (VOIDmode,
1826 output_rtx[i],
1827 gen_rtx_ASM_OPERANDS
1828 (GET_MODE (output_rtx[i]),
1829 TREE_STRING_POINTER (string),
1830 constraints[i], i, argvec, constraintvec,
1831 filename, line));
1833 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1836 /* If there are no outputs (but there are some clobbers)
1837 store the bare ASM_OPERANDS into the PARALLEL. */
1839 if (i == 0)
1840 XVECEXP (body, 0, i++) = obody;
1842 /* Store (clobber REG) for each clobbered register specified. */
1844 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1846 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1847 int j = decode_reg_name (regname);
1848 rtx clobbered_reg;
1850 if (j < 0)
1852 if (j == -3) /* `cc', which is not a register */
1853 continue;
1855 if (j == -4) /* `memory', don't cache memory across asm */
1857 XVECEXP (body, 0, i++)
1858 = gen_rtx_CLOBBER (VOIDmode,
1859 gen_rtx_MEM
1860 (BLKmode,
1861 gen_rtx_SCRATCH (VOIDmode)));
1862 continue;
1865 /* Ignore unknown register, error already signaled. */
1866 continue;
1869 /* Use QImode since that's guaranteed to clobber just one reg. */
1870 clobbered_reg = gen_rtx_REG (QImode, j);
1872 /* Do a sanity check for overlap between the clobbers and,
1873 respectively, the inputs and the outputs. Any such overlap
1874 should have been detected and reported above. */
1875 if (!clobber_conflict_found)
1877 int opno;
1879 /* We test the old body (obody) contents to avoid tripping
1880 over the under-construction body. */
1881 for (opno = 0; opno < noutputs; opno++)
1882 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1883 internal_error ("asm clobber conflict with output operand");
1885 for (opno = 0; opno < ninputs - ninout; opno++)
1886 if (reg_overlap_mentioned_p (clobbered_reg,
1887 ASM_OPERANDS_INPUT (obody, opno)))
1888 internal_error ("asm clobber conflict with input operand");
1891 XVECEXP (body, 0, i++)
1892 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1895 emit_insn (body);
1898 /* For any outputs that needed reloading into registers, spill them
1899 back to where they belong. */
1900 for (i = 0; i < noutputs; ++i)
1901 if (real_output_rtx[i])
1902 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1904 free_temp_slots ();
1907 /* A subroutine of expand_asm_operands. Check that all operands have
1908 the same number of alternatives. Return true if so. */
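/* For example (illustrative): if the first constraint is "=r,m" (one
   comma, hence two alternatives), every other operand constraint must
   also contain exactly one comma, e.g. "g,r"; a plain "r" would draw
   the "differ in number of alternatives" error below.  */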
1910 static bool
1911 check_operand_nalternatives (outputs, inputs)
1912 tree outputs, inputs;
1914 if (outputs || inputs)
1916 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1917 int nalternatives
1918 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1919 tree next = inputs;
1921 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1923 error ("too many alternatives in `asm'");
1924 return false;
1927 tmp = outputs;
1928 while (tmp)
1930 const char *constraint
1931 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1933 if (n_occurrences (',', constraint) != nalternatives)
1935 error ("operand constraints for `asm' differ in number of alternatives");
1936 return false;
1939 if (TREE_CHAIN (tmp))
1940 tmp = TREE_CHAIN (tmp);
1941 else
1942 tmp = next, next = 0;
1946 return true;
1949 /* A subroutine of expand_asm_operands. Check that all operand names
1950 are unique. Return true if so. The names are stored as STRING_CST
1951 nodes rather than canonicalized identifiers, so we compare them
1952 with simple_cst_equal rather than by pointer. */
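/* For example (illustrative), this rejects
     asm ("..." : [x] "=r" (a), [x] "=r" (b) : ...);
   because the operand name "x" appears twice.  */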
1954 static bool
1955 check_unique_operand_names (outputs, inputs)
1956 tree outputs, inputs;
1958 tree i, j;
1960 for (i = outputs; i ; i = TREE_CHAIN (i))
1962 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1963 if (! i_name)
1964 continue;
1966 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1967 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1968 goto failure;
1971 for (i = inputs; i ; i = TREE_CHAIN (i))
1973 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1974 if (! i_name)
1975 continue;
1977 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1978 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1979 goto failure;
1980 for (j = outputs; j ; j = TREE_CHAIN (j))
1981 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1982 goto failure;
1985 return true;
1987 failure:
1988 error ("duplicate asm operand name '%s'",
1989 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1990 return false;
1993 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1994 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1995 STRING and in the constraints to those numbers. */
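/* Worked example (illustrative): in
     asm ("add %[b], %[a]" : [a] "=r" (x) : "[a]" (x), [b] "g" (y));
   operands are numbered outputs first, so the template becomes
   "add %2, %0" and the matching input constraint "[a]" becomes "0".  */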
1997 static tree
1998 resolve_operand_names (string, outputs, inputs, pconstraints)
1999 tree string;
2000 tree outputs, inputs;
2001 const char **pconstraints;
2003 char *buffer = xstrdup (TREE_STRING_POINTER (string));
2004 char *p;
2005 tree t;
2007 /* Assume that we will not need extra space to perform the substitution.
2008 This is because we get to remove '[' and ']', which means we cannot have
2009 a problem until we have more than 999 operands. */
2011 p = buffer;
2012 while ((p = strchr (p, '%')) != NULL)
2014 if (p[1] == '[')
2015 p += 1;
2016 else if (ISALPHA (p[1]) && p[2] == '[')
2017 p += 2;
2018 else
2020 p += 1;
2021 continue;
2024 p = resolve_operand_name_1 (p, outputs, inputs);
2027 string = build_string (strlen (buffer), buffer);
2028 free (buffer);
2030 /* Collect output constraints here because it's convenient.
2031 There should be no named operands here; this is verified
2032 in expand_asm_operands. */
2033 for (t = outputs; t ; t = TREE_CHAIN (t), pconstraints++)
2034 *pconstraints = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2036 /* Substitute [<name>] in input constraint strings. */
2037 for (t = inputs; t ; t = TREE_CHAIN (t), pconstraints++)
2039 const char *c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2040 if (strchr (c, '[') == NULL)
2041 *pconstraints = c;
2042 else
2044 p = buffer = xstrdup (c);
2045 while ((p = strchr (p, '[')) != NULL)
2046 p = resolve_operand_name_1 (p, outputs, inputs);
2048 *pconstraints = ggc_alloc_string (buffer, -1);
2049 free (buffer);
2053 return string;
2056 /* A subroutine of resolve_operand_names. P points to the '[' for a
2057 potential named operand of the form [<name>]. In place, replace
2058 the name and brackets with a number. Return a pointer to the
2059 balance of the string after substitution. */
2061 static char *
2062 resolve_operand_name_1 (p, outputs, inputs)
2063 char *p;
2064 tree outputs, inputs;
2066 char *q;
2067 int op;
2068 tree t;
2069 size_t len;
2071 /* Collect the operand name. */
2072 q = strchr (p, ']');
2073 if (!q)
2075 error ("missing close brace for named operand");
2076 return strchr (p, '\0');
2078 len = q - p - 1;
2080 /* Resolve the name to a number. */
2081 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2083 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2084 if (name)
2086 const char *c = TREE_STRING_POINTER (name);
2087 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2088 goto found;
2091 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2093 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2094 if (name)
2096 const char *c = TREE_STRING_POINTER (name);
2097 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2098 goto found;
2102 *q = '\0';
2103 error ("undefined named operand '%s'", p + 1);
2104 op = 0;
2105 found:
2107 /* Replace the name with the number. Unfortunately, not all libraries
2108 get the return value of sprintf correct, so search for the end of the
2109 generated string by hand. */
2110 sprintf (p, "%d", op);
2111 p = strchr (p, '\0');
2113 /* Verify the no extra buffer space assumption. */
2114 if (p > q)
2115 abort ();
2117 /* Shift the rest of the buffer down to fill the gap. */
2118 memmove (p, q + 1, strlen (q + 1) + 1);
2120 return p;
2123 /* Generate RTL to evaluate the expression EXP
2124 and remember it in case this is the VALUE in a ({... VALUE; }) construct.
2125 Provided just for backward-compatibility. expand_expr_stmt_value()
2126 should be used for new code. */
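/* For example (illustrative): in the GNU C statement expression
     x = ({ int t = f (); t + 1; });
   the final expression `t + 1' is expanded through here with its
   value recorded as last_expr_value.  */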
2128 void
2129 expand_expr_stmt (exp)
2130 tree exp;
2132 expand_expr_stmt_value (exp, -1, 1);
2135 /* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2136 whether to (1) save the value of the expression, (0) discard it or
2137 (-1) use expr_stmts_for_value to tell. The use of -1 is
2138 deprecated, and retained only for backward compatibility. */
2140 void
2141 expand_expr_stmt_value (exp, want_value, maybe_last)
2142 tree exp;
2143 int want_value, maybe_last;
2145 rtx value;
2146 tree type;
2148 if (want_value == -1)
2149 want_value = expr_stmts_for_value != 0;
2151 /* If -Wextra, warn about statements with no side effects,
2152 except for an explicit cast to void (e.g. for assert()), and
2153 except for last statement in ({...}) where they may be useful. */
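  /* E.g. (illustrative): `x == 1;' draws "statement with no effect",
     while `(void) x;' and the last expression of a ({...}) do not.  */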
2154 if (! want_value
2155 && (expr_stmts_for_value == 0 || ! maybe_last)
2156 && exp != error_mark_node)
2158 if (! TREE_SIDE_EFFECTS (exp))
2160 if ((extra_warnings || warn_unused_value)
2161 && !(TREE_CODE (exp) == CONVERT_EXPR
2162 && VOID_TYPE_P (TREE_TYPE (exp))))
2163 warning_with_file_and_line (emit_filename, emit_lineno,
2164 "statement with no effect");
2166 else if (warn_unused_value)
2167 warn_if_unused_value (exp);
2170 /* If EXP is of function type and we are expanding statements for
2171 value, convert it to pointer-to-function. */
2172 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2173 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2175 /* The call to `expand_expr' could cause last_expr_type and
2176 last_expr_value to get reset. Therefore, we set last_expr_value
2177 and last_expr_type *after* calling expand_expr. */
2178 value = expand_expr (exp, want_value ? NULL_RTX : const0_rtx,
2179 VOIDmode, 0);
2180 type = TREE_TYPE (exp);
2182 /* If all we do is reference a volatile value in memory,
2183 copy it to a register to be sure it is actually touched. */
2184 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2186 if (TYPE_MODE (type) == VOIDmode)
2188 else if (TYPE_MODE (type) != BLKmode)
2189 value = copy_to_reg (value);
2190 else
2192 rtx lab = gen_label_rtx ();
2194 /* Compare the value with itself to reference it. */
2195 emit_cmp_and_jump_insns (value, value, EQ,
2196 expand_expr (TYPE_SIZE (type),
2197 NULL_RTX, VOIDmode, 0),
2198 BLKmode, 0, lab);
2199 emit_label (lab);
2203 /* If this expression is part of a ({...}) and is in memory, we may have
2204 to preserve temporaries. */
2205 preserve_temp_slots (value);
2207 /* Free any temporaries used to evaluate this expression. Any temporary
2208 used as a result of this expression will already have been preserved
2209 above. */
2210 free_temp_slots ();
2212 if (want_value)
2214 last_expr_value = value;
2215 last_expr_type = type;
2218 emit_queue ();
2221 /* Warn if EXP contains any computations whose results are not used.
2222 Return 1 if a warning is printed; 0 otherwise. */
2225 warn_if_unused_value (exp)
2226 tree exp;
2228 if (TREE_USED (exp))
2229 return 0;
2231 /* Don't warn about void constructs. This includes casting to void,
2232 void function calls, and statement expressions with a final cast
2233 to void. */
2234 if (VOID_TYPE_P (TREE_TYPE (exp)))
2235 return 0;
2237 switch (TREE_CODE (exp))
2239 case PREINCREMENT_EXPR:
2240 case POSTINCREMENT_EXPR:
2241 case PREDECREMENT_EXPR:
2242 case POSTDECREMENT_EXPR:
2243 case MODIFY_EXPR:
2244 case INIT_EXPR:
2245 case TARGET_EXPR:
2246 case CALL_EXPR:
2247 case METHOD_CALL_EXPR:
2248 case RTL_EXPR:
2249 case TRY_CATCH_EXPR:
2250 case WITH_CLEANUP_EXPR:
2251 case EXIT_EXPR:
2252 return 0;
2254 case BIND_EXPR:
2255 /* For a binding, warn if no side effect within it. */
2256 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2258 case SAVE_EXPR:
2259 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2261 case TRUTH_ORIF_EXPR:
2262 case TRUTH_ANDIF_EXPR:
2263 /* In && or ||, warn if 2nd operand has no side effect. */
2264 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2266 case COMPOUND_EXPR:
2267 if (TREE_NO_UNUSED_WARNING (exp))
2268 return 0;
2269 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2270 return 1;
2271 /* Let people do `(foo (), 0)' without a warning. */
2272 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2273 return 0;
2274 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2276 case NOP_EXPR:
2277 case CONVERT_EXPR:
2278 case NON_LVALUE_EXPR:
2279 /* Don't warn about conversions not explicit in the user's program. */
2280 if (TREE_NO_UNUSED_WARNING (exp))
2281 return 0;
2282 /* Assignment to a cast usually results in a cast of a modify.
2283 Don't complain about that. There can be an arbitrary number of
2284 casts before the modify, so we must loop until we find the first
2285 non-cast expression and then test to see if that is a modify. */
2287 tree tem = TREE_OPERAND (exp, 0);
2289 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2290 tem = TREE_OPERAND (tem, 0);
2292 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2293 || TREE_CODE (tem) == CALL_EXPR)
2294 return 0;
2296 goto maybe_warn;
2298 case INDIRECT_REF:
2299 /* Don't warn about automatic dereferencing of references, since
2300 the user cannot control it. */
2301 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2302 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2303 /* Fall through. */
2305 default:
2306 /* Referencing a volatile value is a side effect, so don't warn. */
2307 if ((DECL_P (exp)
2308 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2309 && TREE_THIS_VOLATILE (exp))
2310 return 0;
2312 /* If this is an expression which has no operands, there is no value
2313 to be unused. There are no such language-independent codes,
2314 but front ends may define such. */
2315 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2316 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2317 return 0;
2319 maybe_warn:
2320 /* If this is an expression with side effects, don't warn. */
2321 if (TREE_SIDE_EFFECTS (exp))
2322 return 0;
2324 warning_with_file_and_line (emit_filename, emit_lineno,
2325 "value computed is not used");
2326 return 1;
2330 /* Clear out the memory of the last expression evaluated. */
2332 void
2333 clear_last_expr ()
2335 last_expr_type = NULL_TREE;
2336 last_expr_value = NULL_RTX;
2339 /* Begin a statement-expression, i.e., a series of statements which
2340 may return a value. Return the RTL_EXPR for this statement expr.
2341 The caller must save that value and pass it to
2342 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2343 in the statement-expression are deallocated at the end of the
2344 expression. */
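/* Illustrative caller sequence (a sketch, not from this file):
     tree rtl_expr = expand_start_stmt_expr (1);
     ... expand the statements of the ({...}) ...
     result = expand_end_stmt_expr (rtl_expr);  */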
2346 tree
2347 expand_start_stmt_expr (has_scope)
2348 int has_scope;
2350 tree t;
2352 /* Make the RTL_EXPR node temporary, not momentary,
2353 so that rtl_expr_chain doesn't become garbage. */
2354 t = make_node (RTL_EXPR);
2355 do_pending_stack_adjust ();
2356 if (has_scope)
2357 start_sequence_for_rtl_expr (t);
2358 else
2359 start_sequence ();
2360 NO_DEFER_POP;
2361 expr_stmts_for_value++;
2362 return t;
2365 /* Restore the previous state at the end of a statement that returns a value.
2366 Returns a tree node representing the statement's value and the
2367 insns to compute the value.
2369 The nodes of that expression have been freed by now, so we cannot use them.
2370 But we don't want to do that anyway; the expression has already been
2371 evaluated and now we just want to use the value. So generate a RTL_EXPR
2372 with the proper type and RTL value.
2374 If the last substatement was not an expression,
2375 return something with type `void'. */
2377 tree
2378 expand_end_stmt_expr (t)
2379 tree t;
2381 OK_DEFER_POP;
2383 if (! last_expr_value || ! last_expr_type)
2385 last_expr_value = const0_rtx;
2386 last_expr_type = void_type_node;
2388 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2389 /* Remove any possible QUEUED. */
2390 last_expr_value = protect_from_queue (last_expr_value, 0);
2392 emit_queue ();
2394 TREE_TYPE (t) = last_expr_type;
2395 RTL_EXPR_RTL (t) = last_expr_value;
2396 RTL_EXPR_SEQUENCE (t) = get_insns ();
2398 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2400 end_sequence ();
2402 /* Don't consider deleting this expr or containing exprs at tree level. */
2403 TREE_SIDE_EFFECTS (t) = 1;
2404 /* Propagate volatility of the actual RTL expr. */
2405 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2407 clear_last_expr ();
2408 expr_stmts_for_value--;
2410 return t;
2413 /* Generate RTL for the start of an if-then. COND is the expression
2414 whose truth should be tested.
2416 If EXITFLAG is nonzero, this conditional is visible to
2417 `exit_something'. */
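/* Illustrative call sequence (a sketch) for
     if (a) s1; else if (b) s2; else s3;
   is:
     expand_start_cond (a, 0);    ... expand s1 ...
     expand_start_elseif (b);     ... expand s2 ...
     expand_start_else ();        ... expand s3 ...
     expand_end_cond ();  */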
2419 void
2420 expand_start_cond (cond, exitflag)
2421 tree cond;
2422 int exitflag;
2424 struct nesting *thiscond = ALLOC_NESTING ();
2426 /* Make an entry on cond_stack for the cond we are entering. */
2428 thiscond->desc = COND_NESTING;
2429 thiscond->next = cond_stack;
2430 thiscond->all = nesting_stack;
2431 thiscond->depth = ++nesting_depth;
2432 thiscond->data.cond.next_label = gen_label_rtx ();
2433 /* Before we encounter an `else', we don't need a separate exit label
2434 unless there are supposed to be exit statements
2435 to exit this conditional. */
2436 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2437 thiscond->data.cond.endif_label = thiscond->exit_label;
2438 cond_stack = thiscond;
2439 nesting_stack = thiscond;
2441 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2444 /* Generate RTL between then-clause and the elseif-clause
2445 of an if-then-elseif-.... */
2447 void
2448 expand_start_elseif (cond)
2449 tree cond;
2451 if (cond_stack->data.cond.endif_label == 0)
2452 cond_stack->data.cond.endif_label = gen_label_rtx ();
2453 emit_jump (cond_stack->data.cond.endif_label);
2454 emit_label (cond_stack->data.cond.next_label);
2455 cond_stack->data.cond.next_label = gen_label_rtx ();
2456 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2459 /* Generate RTL between the then-clause and the else-clause
2460 of an if-then-else. */
2462 void
2463 expand_start_else ()
2465 if (cond_stack->data.cond.endif_label == 0)
2466 cond_stack->data.cond.endif_label = gen_label_rtx ();
2468 emit_jump (cond_stack->data.cond.endif_label);
2469 emit_label (cond_stack->data.cond.next_label);
2470 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2473 /* After calling expand_start_else, turn this "else" into an "else if"
2474 by providing another condition. */
2476 void
2477 expand_elseif (cond)
2478 tree cond;
2480 cond_stack->data.cond.next_label = gen_label_rtx ();
2481 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2484 /* Generate RTL for the end of an if-then.
2485 Pop the record for it off of cond_stack. */
2487 void
2488 expand_end_cond ()
2490 struct nesting *thiscond = cond_stack;
2492 do_pending_stack_adjust ();
2493 if (thiscond->data.cond.next_label)
2494 emit_label (thiscond->data.cond.next_label);
2495 if (thiscond->data.cond.endif_label)
2496 emit_label (thiscond->data.cond.endif_label);
2498 POPSTACK (cond_stack);
2499 clear_last_expr ();
2502 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2503 loop should be exited by `exit_something'. This is a loop for which
2504 `expand_continue' will jump to the top of the loop.
2506 Make an entry on loop_stack to record the labels associated with
2507 this loop. */
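/* Illustrative sequence (a sketch) for `while (cond) body;':
     expand_start_loop (1);
     expand_exit_loop_top_cond (0, cond);
     ... expand body ...
     expand_end_loop ();  */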
2509 struct nesting *
2510 expand_start_loop (exit_flag)
2511 int exit_flag;
2513 struct nesting *thisloop = ALLOC_NESTING ();
2515 /* Make an entry on loop_stack for the loop we are entering. */
2517 thisloop->desc = LOOP_NESTING;
2518 thisloop->next = loop_stack;
2519 thisloop->all = nesting_stack;
2520 thisloop->depth = ++nesting_depth;
2521 thisloop->data.loop.start_label = gen_label_rtx ();
2522 thisloop->data.loop.end_label = gen_label_rtx ();
2523 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2524 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2525 loop_stack = thisloop;
2526 nesting_stack = thisloop;
2528 do_pending_stack_adjust ();
2529 emit_queue ();
2530 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2531 emit_label (thisloop->data.loop.start_label);
2533 return thisloop;
2536 /* Like expand_start_loop but for a loop where the continuation point
2537 (for expand_continue_loop) will be specified explicitly. */
2539 struct nesting *
2540 expand_start_loop_continue_elsewhere (exit_flag)
2541 int exit_flag;
2543 struct nesting *thisloop = expand_start_loop (exit_flag);
2544 loop_stack->data.loop.continue_label = gen_label_rtx ();
2545 return thisloop;
2548 /* Begin a null, aka do { } while (0) "loop". But since the contents
2549 of said loop can still contain a break, we must frob the loop nest. */
2551 struct nesting *
2552 expand_start_null_loop ()
2554 struct nesting *thisloop = ALLOC_NESTING ();
2556 /* Make an entry on loop_stack for the loop we are entering. */
2558 thisloop->desc = LOOP_NESTING;
2559 thisloop->next = loop_stack;
2560 thisloop->all = nesting_stack;
2561 thisloop->depth = ++nesting_depth;
2562 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2563 thisloop->data.loop.end_label = gen_label_rtx ();
2564 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2565 thisloop->exit_label = thisloop->data.loop.end_label;
2566 loop_stack = thisloop;
2567 nesting_stack = thisloop;
2569 return thisloop;
2572 /* Specify the continuation point for a loop started with
2573 expand_start_loop_continue_elsewhere.
2574 Use this at the point in the code to which a continue statement
2575 should jump. */
2577 void
2578 expand_loop_continue_here ()
2580 do_pending_stack_adjust ();
2581 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2582 emit_label (loop_stack->data.loop.continue_label);
2585 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2586 Pop the block off of loop_stack. */
2588 void
2589 expand_end_loop ()
2591 rtx start_label = loop_stack->data.loop.start_label;
2592 rtx etc_note;
2593 int eh_regions, debug_blocks;
2594 bool empty_test;
2596 /* Mark the continue-point at the top of the loop if none elsewhere. */
2597 if (start_label == loop_stack->data.loop.continue_label)
2598 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2600 do_pending_stack_adjust ();
2602 /* If the loop starts with a loop exit, roll that to the end where
2603 it will optimize together with the jump back.
2605 If the loop presently looks like this (in pseudo-C):
2607 LOOP_BEG
2608 start_label:
2609 if (test) goto end_label;
2610 LOOP_END_TOP_COND
2611 body;
2612 goto start_label;
2613 end_label:
2615 transform it to look like:
2617 LOOP_BEG
2618 goto start_label;
2619 top_label:
2620 body;
2621 start_label:
2622 if (test) goto end_label;
2623 goto top_label;
2624 end_label:
2626 We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
2627 the end of the entry conditional. Without this, our lexical scan
2628 can't tell the difference between an entry conditional and a
2629 body conditional that exits the loop. Mistaking the two means
2630 that we can misplace the NOTE_INSN_LOOP_CONT note, which can
2631 screw up loop unrolling.
2633 Things will be oh so much better when loop optimization is done
2634 off of a proper control flow graph... */
2636 /* Scan insns from the top of the loop looking for the END_TOP_COND note. */
2638 empty_test = true;
2639 eh_regions = debug_blocks = 0;
2640 for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2641 if (GET_CODE (etc_note) == NOTE)
2643 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2644 break;
2646 /* We must not walk into a nested loop. */
2647 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2649 etc_note = NULL_RTX;
2650 break;
2653 /* At the same time, scan for EH region notes, as we don't want
2654 to scrog region nesting. This shouldn't happen, but... */
2655 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2656 eh_regions++;
2657 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2659 if (--eh_regions < 0)
2660 /* We've come to the end of an EH region, but never saw the
2661 beginning of that region. That means that an EH region
2662 begins before the top of the loop, and ends in the middle
2663 of it. The existence of such a situation violates a basic
2664 assumption in this code, since that would imply that even
2665 when EH_REGIONS is zero, we might move code out of an
2666 exception region. */
2667 abort ();
2670 /* Likewise for debug scopes. In this case we'll either (1) move
2671 all of the notes if they are properly nested or (2) leave the
2672 notes alone and only rotate the loop at high optimization
2673 levels when we expect to scrog debug info. */
2674 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2675 debug_blocks++;
2676 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2677 debug_blocks--;
2679 else if (INSN_P (etc_note))
2680 empty_test = false;
2682 if (etc_note
2683 && optimize
2684 && ! empty_test
2685 && eh_regions == 0
2686 && (debug_blocks == 0 || optimize >= 2)
2687 && NEXT_INSN (etc_note) != NULL_RTX
2688 && ! any_condjump_p (get_last_insn ()))
2690 /* We found one. Move everything from START to ETC to the end
2691 of the loop, and add a jump from the top of the loop. */
2692 rtx top_label = gen_label_rtx ();
2693 rtx start_move = start_label;
2695 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2696 then we want to move this note also. */
2697 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2698 && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
2699 start_move = PREV_INSN (start_move);
2701 emit_label_before (top_label, start_move);
2703 /* Actually move the insns. If the debug scopes are nested, we
2704 can move everything at once. Otherwise we have to move them
2705 one by one and squeeze out the block notes. */
2706 if (debug_blocks == 0)
2707 reorder_insns (start_move, etc_note, get_last_insn ());
2708 else
2710 rtx insn, next_insn;
2711 for (insn = start_move; insn; insn = next_insn)
2713 /* Figure out which insn comes after this one. We have
2714 to do this before we move INSN. */
2715 next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
2717 if (GET_CODE (insn) == NOTE
2718 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2719 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2720 continue;
2722 reorder_insns (insn, insn, get_last_insn ());
2726 /* Add the jump from the top of the loop. */
2727 emit_jump_insn_before (gen_jump (start_label), top_label);
2728 emit_barrier_before (top_label);
2729 start_label = top_label;
2732 emit_jump (start_label);
2733 emit_note (NULL, NOTE_INSN_LOOP_END);
2734 emit_label (loop_stack->data.loop.end_label);
2736 POPSTACK (loop_stack);
2738 clear_last_expr ();
2741 /* Finish a null loop, aka do { } while (0). */
2743 void
2744 expand_end_null_loop ()
2746 do_pending_stack_adjust ();
2747 emit_label (loop_stack->data.loop.end_label);
2749 POPSTACK (loop_stack);
2751 clear_last_expr ();
2754 /* Generate a jump to the current loop's continue-point.
2755 This is usually the top of the loop, but may be specified
2756 explicitly elsewhere. If not currently inside a loop,
2757 return 0 and do nothing; caller will print an error message. */
2760 expand_continue_loop (whichloop)
2761 struct nesting *whichloop;
2763 /* Emit information for branch prediction. */
2764 rtx note;
2766 note = emit_note (NULL, NOTE_INSN_PREDICTION);
2767 NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
2768 clear_last_expr ();
2769 if (whichloop == 0)
2770 whichloop = loop_stack;
2771 if (whichloop == 0)
2772 return 0;
2773 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2774 NULL_RTX);
2775 return 1;
2778 /* Generate a jump to exit the current loop. If not currently inside a loop,
2779 return 0 and do nothing; caller will print an error message. */
2782 expand_exit_loop (whichloop)
2783 struct nesting *whichloop;
2785 clear_last_expr ();
2786 if (whichloop == 0)
2787 whichloop = loop_stack;
2788 if (whichloop == 0)
2789 return 0;
2790 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2791 return 1;
2794 /* Generate a conditional jump to exit the current loop if COND
2795 evaluates to zero. If not currently inside a loop,
2796 return 0 and do nothing; caller will print an error message. */
2799 expand_exit_loop_if_false (whichloop, cond)
2800 struct nesting *whichloop;
2801 tree cond;
2803 rtx label;
2804 clear_last_expr ();
2806 if (whichloop == 0)
2807 whichloop = loop_stack;
2808 if (whichloop == 0)
2809 return 0;
2811 if (integer_nonzerop (cond))
2812 return 1;
2813 if (integer_zerop (cond))
2814 return expand_exit_loop (whichloop);
2816 /* Check if we definitely won't need a fixup. */
2817 if (whichloop == nesting_stack)
2819 jumpifnot (cond, whichloop->data.loop.end_label);
2820 return 1;
2823 /* In order to handle fixups, we actually create a conditional jump
2824 around an unconditional branch to exit the loop. If fixups are
2825 necessary, they go before the unconditional branch. */
2827 label = gen_label_rtx ();
2828 jumpif (cond, label);
2829 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2830 NULL_RTX);
2831 emit_label (label);
2833 return 1;
2836 /* Like expand_exit_loop_if_false except also emit a note marking
2837 the end of the conditional. Should only be used immediately
2838 after expand_start_loop. */
2841 expand_exit_loop_top_cond (whichloop, cond)
2842 struct nesting *whichloop;
2843 tree cond;
2845 if (! expand_exit_loop_if_false (whichloop, cond))
2846 return 0;
2848 emit_note (NULL, NOTE_INSN_LOOP_END_TOP_COND);
2849 return 1;
2852 /* Return nonzero if we should preserve sub-expressions as separate
2853 pseudos. We never do so if we aren't optimizing. We always do so
2854 if -fexpensive-optimizations.
2856 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2857 the loop may still be a small one. */
2860 preserve_subexpressions_p ()
2862 rtx insn;
2864 if (flag_expensive_optimizations)
2865 return 1;
2867 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2868 return 0;
2870 insn = get_last_insn_anywhere ();
2872 return (insn
2873 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2874 < n_non_fixed_regs * 3));
2878 /* Generate a jump to exit the current loop, conditional, binding contour
2879 or case statement. Not all such constructs are visible to this function,
2880 only those started with EXIT_FLAG nonzero. Individual languages use
2881 the EXIT_FLAG parameter to control which kinds of constructs you can
2882 exit this way.
2884 If not currently inside anything that can be exited,
2885 return 0 and do nothing; caller will print an error message. */
2888 expand_exit_something ()
2890 struct nesting *n;
2891 clear_last_expr ();
2892 for (n = nesting_stack; n; n = n->all)
2893 if (n->exit_label != 0)
2895 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2896 return 1;
2899 return 0;
2902 /* Generate RTL to return from the current function, with no value.
2903 (That is, we do not do anything about returning any value.) */
2905 void
2906 expand_null_return ()
2908 rtx last_insn;
2910 last_insn = get_last_insn ();
2912 /* If this function was declared to return a value, but this
2913 return statement supplies none, clobber the return registers so they are not
2914 propagated live to the rest of the function. */
2915 clobber_return_register ();
2917 expand_null_return_1 (last_insn);
2920 /* Try to guess whether the returned value indicates an error code. */
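/* Illustrative: `return NULL;' from a pointer-valued function gives
   PRED_NULL_RETURN, `return -1;' gives PRED_NEGATIVE_RETURN, and
   `return 42;' gives PRED_CONST_RETURN, while 0 and 1 are left
   unpredicted as likely booleans.  */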
2921 static enum br_predictor
2922 return_prediction (val)
2923 rtx val;
2925 /* Different heuristics for pointers and scalars. */
2926 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2928 /* NULL is usually not returned. */
2929 if (val == const0_rtx)
2930 return PRED_NULL_RETURN;
2932 else
2934 /* Negative return values are often used to indicate
2935 errors. */
2936 if (GET_CODE (val) == CONST_INT
2937 && INTVAL (val) < 0)
2938 return PRED_NEGATIVE_RETURN;
2939 /* Constant return values are also usually errors;
2940 zero/one often mean booleans, so exclude them from the
2941 heuristics. */
2942 if (CONSTANT_P (val)
2943 && (val != const0_rtx && val != const1_rtx))
2944 return PRED_CONST_RETURN;
2946 return PRED_NO_PREDICTION;
2949 /* Generate RTL to return from the current function, with value VAL. */
2951 static void
2952 expand_value_return (val)
2953 rtx val;
2955 rtx last_insn;
2956 rtx return_reg;
2957 enum br_predictor pred;
2959 if ((pred = return_prediction (val)) != PRED_NO_PREDICTION)
2961 /* Emit information for branch prediction. */
2962 rtx note;
2964 note = emit_note (NULL, NOTE_INSN_PREDICTION);
2966 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2970 last_insn = get_last_insn ();
2971 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2973 /* Copy the value to the return location
2974 unless it's already there. */
2976 if (return_reg != val)
2978 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2979 #ifdef PROMOTE_FUNCTION_RETURN
2980 int unsignedp = TREE_UNSIGNED (type);
2981 enum machine_mode old_mode
2982 = DECL_MODE (DECL_RESULT (current_function_decl));
2983 enum machine_mode mode
2984 = promote_mode (type, old_mode, &unsignedp, 1);
2986 if (mode != old_mode)
2987 val = convert_modes (mode, old_mode, val, unsignedp);
2988 #endif
2989 if (GET_CODE (return_reg) == PARALLEL)
2990 emit_group_load (return_reg, val, int_size_in_bytes (type));
2991 else
2992 emit_move_insn (return_reg, val);
2995 expand_null_return_1 (last_insn);
2998 /* Output a return with no value. If LAST_INSN is nonzero,
2999 pretend that the return takes place after LAST_INSN. */
3001 static void
3002 expand_null_return_1 (last_insn)
3003 rtx last_insn;
3005 rtx end_label = cleanup_label ? cleanup_label : return_label;
3007 clear_pending_stack_adjust ();
3008 do_pending_stack_adjust ();
3009 clear_last_expr ();
3011 if (end_label == 0)
3012 end_label = return_label = gen_label_rtx ();
3013 expand_goto_internal (NULL_TREE, end_label, last_insn);
3016 /* Generate RTL to evaluate the expression RETVAL and return it
3017 from the current function. */
3019 void
3020 expand_return (retval)
3021 tree retval;
3023 /* If there are any cleanups to be performed, then they will
3024 be inserted following LAST_INSN. It is desirable
3025 that the last_insn, for such purposes, should be the
3026 last insn before computing the return value. Otherwise, cleanups
3027 which call functions can clobber the return value. */
3028 /* ??? rms: I think that is erroneous, because in C++ it would
3029 run destructors on variables that might be used in the subsequent
3030 computation of the return value. */
3031 rtx last_insn = 0;
3032 rtx result_rtl;
3033 rtx val = 0;
3034 tree retval_rhs;
3036 /* If function wants no value, give it none. */
3037 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3039 expand_expr (retval, NULL_RTX, VOIDmode, 0);
3040 emit_queue ();
3041 expand_null_return ();
3042 return;
3045 if (retval == error_mark_node)
3047 /* Treat this like a return of no value from a function that
3048 returns a value. */
3049 expand_null_return ();
3050 return;
3052 else if (TREE_CODE (retval) == RESULT_DECL)
3053 retval_rhs = retval;
3054 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3055 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3056 retval_rhs = TREE_OPERAND (retval, 1);
3057 else if (VOID_TYPE_P (TREE_TYPE (retval)))
3058 /* Recognize tail-recursive call to void function. */
3059 retval_rhs = retval;
3060 else
3061 retval_rhs = NULL_TREE;
3063 last_insn = get_last_insn ();
3065 /* Distribute return down conditional expr if either of the sides
3066 may involve tail recursion (see test below). This increases the number
3067 of tail recursions we see. Don't always do this, since it can produce
3068 sub-optimal code in some cases and we distribute assignments into
3069 conditional expressions when it would help. */
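  /* Illustrative: `return p ? f (x) : g (x);' is rewritten below as
     the equivalent of `if (p) return f (x); return g (x);', so each
     branch can be examined separately for tail recursion.  */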
3071 if (optimize && retval_rhs != 0
3072 && frame_offset == 0
3073 && TREE_CODE (retval_rhs) == COND_EXPR
3074 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3075 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3077 rtx label = gen_label_rtx ();
3078 tree expr;
3080 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3081 start_cleanup_deferral ();
3082 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3083 DECL_RESULT (current_function_decl),
3084 TREE_OPERAND (retval_rhs, 1));
3085 TREE_SIDE_EFFECTS (expr) = 1;
3086 expand_return (expr);
3087 emit_label (label);
3089 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3090 DECL_RESULT (current_function_decl),
3091 TREE_OPERAND (retval_rhs, 2));
3092 TREE_SIDE_EFFECTS (expr) = 1;
3093 expand_return (expr);
3094 end_cleanup_deferral ();
3095 return;
3098 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3100 /* If the result is an aggregate that is being returned in one (or more)
3101 registers, load the registers here. The compiler currently can't handle
3102 copying a BLKmode value into registers. We could put this code in a
3103 more general area (for use by everyone instead of just function
3104 call/return), but until this feature is generally usable it is kept here
3105 (and in expand_call). The value must go into a pseudo in case there
3106 are cleanups that will clobber the real return register. */
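  /* Worked example (illustrative, assuming 8-bit units): returning a
     6-byte struct when UNITS_PER_WORD == 4 gives bytes == 6 and
     n_regs == 2; on a BYTES_BIG_ENDIAN target, big_endian_correction
     is 32 - (6 % 4) * 8 == 16, so copying starts 16 bits into the
     first destination word.  */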
3108 if (retval_rhs != 0
3109 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3110 && GET_CODE (result_rtl) == REG)
3112 int i;
3113 unsigned HOST_WIDE_INT bitpos, xbitpos;
3114 unsigned HOST_WIDE_INT big_endian_correction = 0;
3115 unsigned HOST_WIDE_INT bytes
3116 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3117 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3118 unsigned int bitsize
3119 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3120 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3121 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3122 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3123 enum machine_mode tmpmode, result_reg_mode;
3125 if (bytes == 0)
3127 expand_null_return ();
3128 return;
3131 /* Structures whose size is not a multiple of a word are aligned
3132 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3133 machine, this means we must skip the empty high order bytes when
3134 calculating the bit offset. */
3135 if (BYTES_BIG_ENDIAN
3136 && bytes % UNITS_PER_WORD)
3137 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3138 * BITS_PER_UNIT));
3140 /* Copy the structure BITSIZE bits at a time. */
3141 for (bitpos = 0, xbitpos = big_endian_correction;
3142 bitpos < bytes * BITS_PER_UNIT;
3143 bitpos += bitsize, xbitpos += bitsize)
3145 /* We need a new destination pseudo each time xbitpos is
3146 on a word boundary and when xbitpos == big_endian_correction
3147 (the first time through). */
3148 if (xbitpos % BITS_PER_WORD == 0
3149 || xbitpos == big_endian_correction)
3151 /* Generate an appropriate register. */
3152 dst = gen_reg_rtx (word_mode);
3153 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3155 /* Clear the destination before we move anything into it. */
3156 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
3159 /* We need a new source operand each time bitpos is on a word
3160 boundary. */
3161 if (bitpos % BITS_PER_WORD == 0)
3162 src = operand_subword_force (result_val,
3163 bitpos / BITS_PER_WORD,
3164 BLKmode);
3166 /* Use bitpos for the source extraction (left justified) and
3167 xbitpos for the destination store (right justified). */
3168 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3169 extract_bit_field (src, bitsize,
3170 bitpos % BITS_PER_WORD, 1,
3171 NULL_RTX, word_mode, word_mode,
3172 BITS_PER_WORD),
3173 BITS_PER_WORD);
3176 /* Find the smallest integer mode large enough to hold the
3177 entire structure and use that mode instead of BLKmode
3178 on the USE insn for the return register. */
3179 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3180 tmpmode != VOIDmode;
3181 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3182 /* Have we found a large enough mode? */
3183 if (GET_MODE_SIZE (tmpmode) >= bytes)
3184 break;
3186 /* No suitable mode found. */
3187 if (tmpmode == VOIDmode)
3188 abort ();
3190 PUT_MODE (result_rtl, tmpmode);
3192 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3193 result_reg_mode = word_mode;
3194 else
3195 result_reg_mode = tmpmode;
3196 result_reg = gen_reg_rtx (result_reg_mode);
3198 emit_queue ();
3199 for (i = 0; i < n_regs; i++)
3200 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3201 result_pseudos[i]);
3203 if (tmpmode != result_reg_mode)
3204 result_reg = gen_lowpart (tmpmode, result_reg);
3206 expand_value_return (result_reg);
3208 else if (retval_rhs != 0
3209 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3210 && (GET_CODE (result_rtl) == REG
3211 || (GET_CODE (result_rtl) == PARALLEL)))
3213 /* Calculate the return value into a temporary (usually a pseudo
3214 reg). */
3215 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3216 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3218 val = assign_temp (nt, 0, 0, 1);
3219 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3220 val = force_not_mem (val);
3221 emit_queue ();
3222 /* Return the calculated value, doing cleanups first. */
3223 expand_value_return (val);
3225 else
3227 /* No cleanups or no hard reg used;
3228 calculate value into hard return reg. */
3229 expand_expr (retval, const0_rtx, VOIDmode, 0);
3230 emit_queue ();
3231 expand_value_return (result_rtl);
3235 /* Attempt to optimize a potential tail recursion call into a goto.
3236 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3237 where to place the jump to the tail recursion label.
3239 Return TRUE if the call was optimized into a goto. */
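/* Illustrative: in
     int f (int n) { return n ? f (n - 1) : 0; }
   the recursive call can be replaced by storing n - 1 into n and
   jumping back to tail_recursion_label near the top of the function.  */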
3242 optimize_tail_recursion (arguments, last_insn)
3243 tree arguments;
3244 rtx last_insn;
3246 /* Finish checking validity, and if valid emit code to set the
3247 argument variables for the new call. */
3248 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3250 if (tail_recursion_label == 0)
3252 tail_recursion_label = gen_label_rtx ();
3253 emit_label_after (tail_recursion_label,
3254 tail_recursion_reentry);
3256 emit_queue ();
3257 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3258 emit_barrier ();
3259 return 1;
3261 return 0;
3264 /* Emit code to alter this function's formal parms for a tail-recursive call.
3265 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3266 FORMALS is the chain of decls of formals.
3267 Return 1 if this can be done;
3268 otherwise return 0 and do not emit any code. */
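/* Illustrative: for the tail call `f (y, x)' inside
   `int f (int x, int y)', the second actual (the old `x') mentions
   the register of a formal that is stored earlier, so it is copied
   to a fresh pseudo before the store into `x' clobbers it.  */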
3270 static int
3271 tail_recursion_args (actuals, formals)
3272 tree actuals, formals;
3274 tree a = actuals, f = formals;
3275 int i;
3276 rtx *argvec;
3278 /* Check that number and types of actuals are compatible
3279 with the formals. This is not always true in valid C code.
3280 Also check that no formal needs to be addressable
3281 and that all formals are scalars. */
3283 /* Also count the args. */
3285 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3287 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3288 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3289 return 0;
3290 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3291 return 0;
3293 if (a != 0 || f != 0)
3294 return 0;
3296 /* Compute all the actuals. */
3298 argvec = (rtx *) alloca (i * sizeof (rtx));
3300 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3301 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3303 /* Find which actual values refer to current values of previous formals.
3304 Copy each of them now, before any formal is changed. */
3306 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3308 int copy = 0;
3309 int j;
3310 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3311 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3313 copy = 1;
3314 break;
3316 if (copy)
3317 argvec[i] = copy_to_reg (argvec[i]);
3320 /* Store the values of the actuals into the formals. */
3322 for (f = formals, a = actuals, i = 0; f;
3323 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3325 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3326 emit_move_insn (DECL_RTL (f), argvec[i]);
3327 else
3329 rtx tmp = argvec[i];
3331 if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
3333 tmp = gen_reg_rtx (DECL_MODE (f));
3334 convert_move (tmp, argvec[i],
3335 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3337 convert_move (DECL_RTL (f), tmp,
3338 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3342 free_temp_slots ();
3343 return 1;
3346 /* Generate the RTL code for entering a binding contour.
3347 The variables are declared one by one, by calls to `expand_decl'.
3349 FLAGS is a bitwise or of the following flags:
3351 1 - Nonzero if this construct should be visible to
3352 `exit_something'.
3354 2 - Nonzero if this contour does not require a
3355 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3356 language-independent code should set this flag because they
3357 will not create corresponding BLOCK nodes. (There should be
3358 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3359 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3360 when expand_end_bindings is called.
3362 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3363 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3364 note. */
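/* Illustrative: expand_start_bindings (0) opens an ordinary scope
   with a NOTE_INSN_BLOCK_BEG note; expand_start_bindings (2), as used
   by expand_start_target_temps below, opens one with no block note;
   setting bit 1 additionally makes the contour exitable via
   exit_something.  */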
3366 void
3367 expand_start_bindings_and_block (flags, block)
3368 int flags;
3369 tree block;
3371 struct nesting *thisblock = ALLOC_NESTING ();
3372 rtx note;
3373 int exit_flag = ((flags & 1) != 0);
3374 int block_flag = ((flags & 2) == 0);
3376 /* If a BLOCK is supplied, then the caller should be requesting a
3377 NOTE_INSN_BLOCK_BEG note. */
3378 if (!block_flag && block)
3379 abort ();
3381 /* Create a note to mark the beginning of the block. */
3382 if (block_flag)
3384 note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
3385 NOTE_BLOCK (note) = block;
3387 else
3388 note = emit_note (NULL, NOTE_INSN_DELETED);
3390 /* Make an entry on block_stack for the block we are entering. */
3392 thisblock->desc = BLOCK_NESTING;
3393 thisblock->next = block_stack;
3394 thisblock->all = nesting_stack;
3395 thisblock->depth = ++nesting_depth;
3396 thisblock->data.block.stack_level = 0;
3397 thisblock->data.block.cleanups = 0;
3398 thisblock->data.block.n_function_calls = 0;
3399 thisblock->data.block.exception_region = 0;
3400 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3402 thisblock->data.block.conditional_code = 0;
3403 thisblock->data.block.last_unconditional_cleanup = note;
3404 /* When we insert instructions after the last unconditional cleanup,
3405 we don't adjust last_insn. That means that a later add_insn will
3406 clobber the instructions we've just added. The easiest way to
3407 fix this is to just insert another instruction here, so that the
3408 instructions inserted after the last unconditional cleanup are
3409 never the last instruction. */
3410 emit_note (NULL, NOTE_INSN_DELETED);
3412 if (block_stack
3413 && !(block_stack->data.block.cleanups == NULL_TREE
3414 && block_stack->data.block.outer_cleanups == NULL_TREE))
3415 thisblock->data.block.outer_cleanups
3416 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3417 block_stack->data.block.outer_cleanups);
3418 else
3419 thisblock->data.block.outer_cleanups = 0;
3420 thisblock->data.block.label_chain = 0;
3421 thisblock->data.block.innermost_stack_block = stack_block_stack;
3422 thisblock->data.block.first_insn = note;
3423 thisblock->data.block.block_start_count = ++current_block_start_count;
3424 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3425 block_stack = thisblock;
3426 nesting_stack = thisblock;
3428 /* Make a new level for allocating stack slots. */
3429 push_temp_slots ();
3432 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3433 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3434 expand_expr are made. After we end the region, we know that all
3435 space for all temporaries that were created by TARGET_EXPRs will be
3436 destroyed and their space freed for reuse. */
3438 void
3439 expand_start_target_temps ()
3441 /* This is so that even if the result is preserved, the space
3442 allocated will be freed, as we know that it is no longer in use. */
3443 push_temp_slots ();
3445 /* Start a new binding layer that will keep track of all cleanup
3446 actions to be performed. */
3447 expand_start_bindings (2);
3449 target_temp_slot_level = temp_slot_level;
3452 void
3453 expand_end_target_temps ()
3455 expand_end_bindings (NULL_TREE, 0, 0);
3457 /* This is so that even if the result is preserved, the space
3458 allocated will be freed, as we know that it is no longer in use. */
3459 pop_temp_slots ();
3462 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
3463 in question represents the outermost pair of curly braces (i.e. the "body
3464 block") of a function or method.
3466 For any BLOCK node representing a "body block" of a function or method, the
3467 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3468 represents the outermost (function) scope for the function or method (i.e.
3469 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3470 *that* node in turn will point to the relevant FUNCTION_DECL node. */
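/* Illustrative nesting (a sketch) for `int f (int a) { int b; ... }':
     FUNCTION_DECL `f'
       BLOCK -- outermost (function) scope, with the formal `a'
         BLOCK -- the body block, with the local `b'
   is_body_block returns 1 only for the inner BLOCK.  */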
3473 is_body_block (stmt)
3474 tree stmt;
3476 if (TREE_CODE (stmt) == BLOCK)
3478 tree parent = BLOCK_SUPERCONTEXT (stmt);
3480 if (parent && TREE_CODE (parent) == BLOCK)
3482 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3484 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3485 return 1;
3489 return 0;
3492 /* True if we are currently emitting insns in an area of output code
3493 that is controlled by a conditional expression. This is used by
3494 the cleanup handling code to generate conditional cleanup actions. */
3497 conditional_context ()
3499 return block_stack && block_stack->data.block.conditional_code;
3502 /* Return an opaque pointer to the current nesting level, so frontend code
3503 can check its own sanity. */
3505 struct nesting *
3506 current_nesting_level ()
3508 return cfun ? block_stack : 0;
3511 /* Emit a handler label for a nonlocal goto handler.
3512 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3514 static rtx
3515 expand_nl_handler_label (slot, before_insn)
3516 rtx slot, before_insn;
3518 rtx insns;
3519 rtx handler_label = gen_label_rtx ();
3521 /* Don't let cleanup_cfg delete the handler. */
3522 LABEL_PRESERVE_P (handler_label) = 1;
3524 start_sequence ();
3525 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3526 insns = get_insns ();
3527 end_sequence ();
3528 emit_insn_before (insns, before_insn);
3530 emit_label (handler_label);
3532 return handler_label;
3535 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3536 handler. */
3537 static void
3538 expand_nl_goto_receiver ()
3540 #ifdef HAVE_nonlocal_goto
3541 if (! HAVE_nonlocal_goto)
3542 #endif
3543 /* First adjust our frame pointer to its actual value. It was
3544 previously set to the start of the virtual area corresponding to
3545 the stacked variables when we branched here and now needs to be
3546 adjusted to the actual hardware fp value.
3548 Assignments to virtual registers are converted by
3549 instantiate_virtual_regs into the corresponding assignment
3550 to the underlying register (fp in this case) that makes
3551 the original assignment true.
3552 So the following insn will actually be
3553 decrementing fp by STARTING_FRAME_OFFSET. */
3554 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3556 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3557 if (fixed_regs[ARG_POINTER_REGNUM])
3559 #ifdef ELIMINABLE_REGS
3560 /* If the argument pointer can be eliminated in favor of the
3561 frame pointer, we don't need to restore it. We assume here
3562 that if such an elimination is present, it can always be used.
3563 This is the case on all known machines; if we don't make this
3564 assumption, we do unnecessary saving on many machines. */
3565 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3566 size_t i;
3568 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3569 if (elim_regs[i].from == ARG_POINTER_REGNUM
3570 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3571 break;
3573 if (i == ARRAY_SIZE (elim_regs))
3574 #endif
3576 /* Now restore our arg pointer from the address at which it
3577 was saved in our stack frame. */
3578 emit_move_insn (virtual_incoming_args_rtx,
3579 copy_to_reg (get_arg_pointer_save_area (cfun)));
3582 #endif
3584 #ifdef HAVE_nonlocal_goto_receiver
3585 if (HAVE_nonlocal_goto_receiver)
3586 emit_insn (gen_nonlocal_goto_receiver ());
3587 #endif
3590 /* Make handlers for nonlocal gotos taking place in the function calls in
3591 block THISBLOCK. */
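/* Illustrative GNU C needing such handlers (`call_it' is hypothetical):
     void outer (void)
     {
       void inner (void) { goto done; }
       call_it (inner);
      done:;
     }
   A call made in THISBLOCK may reach `inner', whose nonlocal goto
   re-enters `outer' through the receiver emitted below.  */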
3593 static void
3594 expand_nl_goto_receivers (thisblock)
3595 struct nesting *thisblock;
3597 tree link;
3598 rtx afterward = gen_label_rtx ();
3599 rtx insns, slot;
3600 rtx label_list;
3601 int any_invalid;
3603 /* Record the handler address in the stack slot for that purpose,
3604 during this block, saving and restoring the outer value. */
3605 if (thisblock->next != 0)
3606 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3608 rtx save_receiver = gen_reg_rtx (Pmode);
3609 emit_move_insn (XEXP (slot, 0), save_receiver);
3611 start_sequence ();
3612 emit_move_insn (save_receiver, XEXP (slot, 0));
3613 insns = get_insns ();
3614 end_sequence ();
3615 emit_insn_before (insns, thisblock->data.block.first_insn);
3618 /* Jump around the handlers; they run only when specially invoked. */
3619 emit_jump (afterward);
3621 /* Make a separate handler for each label. */
3622 link = nonlocal_labels;
3623 slot = nonlocal_goto_handler_slots;
3624 label_list = NULL_RTX;
3625 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3626 /* Skip any labels we shouldn't be able to jump to from here;
3627 we generate one special handler for all of them below, which
3628 simply traps. */
3629 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3631 rtx lab;
3632 lab = expand_nl_handler_label (XEXP (slot, 0),
3633 thisblock->data.block.first_insn);
3634 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3636 expand_nl_goto_receiver ();
3638 /* Jump to the "real" nonlocal label. */
3639 expand_goto (TREE_VALUE (link));
3642 /* A second pass over all nonlocal labels; this time we handle those
3643 we should not be able to jump to at this point. */
3644 link = nonlocal_labels;
3645 slot = nonlocal_goto_handler_slots;
3646 any_invalid = 0;
3647 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3648 if (DECL_TOO_LATE (TREE_VALUE (link)))
3650 rtx lab;
3651 lab = expand_nl_handler_label (XEXP (slot, 0),
3652 thisblock->data.block.first_insn);
3653 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3654 any_invalid = 1;
3657 if (any_invalid)
3659 expand_nl_goto_receiver ();
3660 expand_builtin_trap ();
3663 nonlocal_goto_handler_labels = label_list;
3664 emit_label (afterward);
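/* Illustrative sketch (GNU C source, not code from this file): the
   construct that exercises the receivers built above is a goto from a
   nested function to a local label of its containing function:

       int f (void)
       {
         __label__ out;
         void inner (void) { goto out; }
         inner ();
         return 0;
       out:
         return 1;
       }

   Any function call emitted in the block may transfer control back to
   `out', so each handler above restores the frame and argument
   pointers before jumping to the real label.  */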
3667 /* Warn about any unused VARS (which may contain nodes other than
3668 VAR_DECLs, but such nodes are ignored). The nodes are connected
3669 via the TREE_CHAIN field. */
3671 void
3672 warn_about_unused_variables (vars)
3673 tree vars;
3675 tree decl;
3677 if (warn_unused_variable)
3678 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3679 if (TREE_CODE (decl) == VAR_DECL
3680 && ! TREE_USED (decl)
3681 && ! DECL_IN_SYSTEM_HEADER (decl)
3682 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3683 warning_with_decl (decl, "unused variable `%s'");
3686 /* Generate RTL code to terminate a binding contour.
3688 VARS is the chain of VAR_DECL nodes for the variables bound in this
3689 contour. There may actually be other nodes in this chain, but any
3690 nodes other than VAR_DECLS are ignored.
3692 MARK_ENDS is nonzero if we should put a note at the beginning
3693 and end of this binding contour.
3695 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3696 (That is true automatically if the contour has a saved stack level.) */
3698 void
3699 expand_end_bindings (vars, mark_ends, dont_jump_in)
3700 tree vars;
3701 int mark_ends;
3702 int dont_jump_in;
3704 struct nesting *thisblock = block_stack;
3706 /* If any of the variables in this scope were not used, warn the
3707 user. */
3708 warn_about_unused_variables (vars);
3710 if (thisblock->exit_label)
3712 do_pending_stack_adjust ();
3713 emit_label (thisblock->exit_label);
3716 /* If necessary, make handlers for nonlocal gotos taking
3717 place in the function calls in this block. */
3718 if (function_call_count != thisblock->data.block.n_function_calls
3719 && nonlocal_labels
3720 /* Make handler for outermost block
3721 if there were any nonlocal gotos to this function. */
3722 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3723 /* Make handler for inner block if it has something
3724 special to do when you jump out of it. */
3725 : (thisblock->data.block.cleanups != 0
3726 || thisblock->data.block.stack_level != 0)))
3727 expand_nl_goto_receivers (thisblock);
3729 /* Don't allow jumping into a block that has a stack level.
3730 Cleanups are allowed, though. */
3731 if (dont_jump_in
3732 || thisblock->data.block.stack_level != 0)
3734 struct label_chain *chain;
3736 /* Any labels in this block are no longer valid to go to.
3737 Mark them to cause an error message. */
3738 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3740 DECL_TOO_LATE (chain->label) = 1;
3741 /* If any goto without a fixup came to this label,
3742 that must be an error, because gotos without fixups
3743 come from outside all saved stack-levels. */
3744 if (TREE_ADDRESSABLE (chain->label))
3745 error_with_decl (chain->label,
3746 "label `%s' used before containing binding contour");
3750 /* Restore stack level in effect before the block
3751 (only if variable-size objects allocated). */
3752 /* Perform any cleanups associated with the block. */
3754 if (thisblock->data.block.stack_level != 0
3755 || thisblock->data.block.cleanups != 0)
3757 int reachable;
3758 rtx insn;
3760 /* Don't let cleanups affect ({...}) constructs. */
3761 int old_expr_stmts_for_value = expr_stmts_for_value;
3762 rtx old_last_expr_value = last_expr_value;
3763 tree old_last_expr_type = last_expr_type;
3764 expr_stmts_for_value = 0;
3766 /* Only clean up here if this point can actually be reached. */
3767 insn = get_last_insn ();
3768 if (GET_CODE (insn) == NOTE)
3769 insn = prev_nonnote_insn (insn);
3770 reachable = (! insn || GET_CODE (insn) != BARRIER);
3772 /* Do the cleanups. */
3773 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3774 if (reachable)
3775 do_pending_stack_adjust ();
3777 expr_stmts_for_value = old_expr_stmts_for_value;
3778 last_expr_value = old_last_expr_value;
3779 last_expr_type = old_last_expr_type;
3781 /* Restore the stack level. */
3783 if (reachable && thisblock->data.block.stack_level != 0)
3785 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3786 thisblock->data.block.stack_level, NULL_RTX);
3787 if (nonlocal_goto_handler_slots != 0)
3788 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3789 NULL_RTX);
3792 /* Any gotos out of this block must also do these things.
3793 Also report any gotos with fixups that came to labels in this
3794 level. */
3795 fixup_gotos (thisblock,
3796 thisblock->data.block.stack_level,
3797 thisblock->data.block.cleanups,
3798 thisblock->data.block.first_insn,
3799 dont_jump_in);
3802 /* Mark the beginning and end of the scope if requested.
3803 We do this now, after running cleanups on the variables
3804 just going out of scope, so they are in scope for their cleanups. */
3806 if (mark_ends)
3808 rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
3809 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3811 else
3812 /* Get rid of the beginning-mark if we don't make an end-mark. */
3813 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3815 /* Restore the temporary level of TARGET_EXPRs. */
3816 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3818 /* Restore block_stack level for containing block. */
3820 stack_block_stack = thisblock->data.block.innermost_stack_block;
3821 POPSTACK (block_stack);
3823 /* Pop the stack slot nesting and free any slots at this level. */
3824 pop_temp_slots ();
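/* For illustration only: a block that declares a variable-sized
   object acquires a stack level, so the machinery above applies.
   Given

       void g (int n)
       {
         {
           char buf[n];
           use (buf);
         }
       }

   save_stack_pointer records the stack pointer when `buf' is
   expanded, and expand_end_bindings restores it at the inner close
   brace and marks any labels in the block DECL_TOO_LATE.  (`use' is
   a placeholder for an arbitrary function.)  */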
3827 /* Generate code to save the stack pointer at the start of the current block
3828 and set up to restore it on exit. */
3830 void
3831 save_stack_pointer ()
3833 struct nesting *thisblock = block_stack;
3835 if (thisblock->data.block.stack_level == 0)
3837 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3838 &thisblock->data.block.stack_level,
3839 thisblock->data.block.first_insn);
3840 stack_block_stack = thisblock;
3844 /* Generate RTL for the automatic variable declaration DECL.
3845 (Other kinds of declarations are simply ignored if seen here.) */
3847 void
3848 expand_decl (decl)
3849 tree decl;
3851 tree type;
3853 type = TREE_TYPE (decl);
3855 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3856 type in case this node is used in a reference. */
3857 if (TREE_CODE (decl) == CONST_DECL)
3859 DECL_MODE (decl) = TYPE_MODE (type);
3860 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3861 DECL_SIZE (decl) = TYPE_SIZE (type);
3862 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3863 return;
3866 /* Otherwise, only automatic variables need any expansion done. Static and
3867 external variables, and external functions, will be handled by
3868 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3869 nothing. PARM_DECLs are handled in `assign_parms'. */
3870 if (TREE_CODE (decl) != VAR_DECL)
3871 return;
3873 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3874 return;
3876 /* Create the RTL representation for the variable. */
3878 if (type == error_mark_node)
3879 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3881 else if (DECL_SIZE (decl) == 0)
3882 /* Variable with incomplete type. */
3884 rtx x;
3885 if (DECL_INITIAL (decl) == 0)
3886 /* Error message was already done; now avoid a crash. */
3887 x = gen_rtx_MEM (BLKmode, const0_rtx);
3888 else
3889 /* An initializer is going to decide the size of this array.
3890 Until we know the size, represent its address with a reg. */
3891 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3893 set_mem_attributes (x, decl, 1);
3894 SET_DECL_RTL (decl, x);
3896 else if (DECL_MODE (decl) != BLKmode
3897 /* If -ffloat-store, don't put explicit float vars
3898 into regs. */
3899 && !(flag_float_store
3900 && TREE_CODE (type) == REAL_TYPE)
3901 && ! TREE_THIS_VOLATILE (decl)
3902 && (DECL_REGISTER (decl) || optimize))
3904 /* Automatic variable that can go in a register. */
3905 int unsignedp = TREE_UNSIGNED (type);
3906 enum machine_mode reg_mode
3907 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3909 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3911 if (GET_CODE (DECL_RTL (decl)) == REG)
3912 REGNO_DECL (REGNO (DECL_RTL (decl))) = decl;
3913 else if (GET_CODE (DECL_RTL (decl)) == CONCAT)
3915 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 0))) = decl;
3916 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 1))) = decl;
3919 mark_user_reg (DECL_RTL (decl));
3921 if (POINTER_TYPE_P (type))
3922 mark_reg_pointer (DECL_RTL (decl),
3923 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3925 maybe_set_unchanging (DECL_RTL (decl), decl);
3927 /* If something wants our address, try to use ADDRESSOF. */
3928 if (TREE_ADDRESSABLE (decl))
3929 put_var_into_stack (decl);
3932 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3933 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3934 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3935 STACK_CHECK_MAX_VAR_SIZE)))
3937 /* Variable of fixed size that goes on the stack. */
3938 rtx oldaddr = 0;
3939 rtx addr;
3940 rtx x;
3942 /* If we previously made RTL for this decl, it must be an array
3943 whose size was determined by the initializer.
3944 The old address was a register; set that register now
3945 to the proper address. */
3946 if (DECL_RTL_SET_P (decl))
3948 if (GET_CODE (DECL_RTL (decl)) != MEM
3949 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3950 abort ();
3951 oldaddr = XEXP (DECL_RTL (decl), 0);
3954 /* Set alignment we actually gave this decl. */
3955 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3956 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3957 DECL_USER_ALIGN (decl) = 0;
3959 x = assign_temp (decl, 1, 1, 1);
3960 set_mem_attributes (x, decl, 1);
3961 SET_DECL_RTL (decl, x);
3963 if (oldaddr)
3965 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3966 if (addr != oldaddr)
3967 emit_move_insn (oldaddr, addr);
3970 else
3971 /* Dynamic-size object: must push space on the stack. */
3973 rtx address, size, x;
3975 /* Record the stack pointer on entry to the block, if we have
3976 not already done so. */
3977 do_pending_stack_adjust ();
3978 save_stack_pointer ();
3980 /* In function-at-a-time mode, variable_size doesn't expand this,
3981 so do it now. */
3982 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3983 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3984 const0_rtx, VOIDmode, 0);
3986 /* Compute the variable's size, in bytes. */
3987 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3988 free_temp_slots ();
3990 /* Allocate space on the stack for the variable. Note that
3991 DECL_ALIGN says how the variable is to be aligned and we
3992 cannot use it to conclude anything about the alignment of
3993 the size. */
3994 address = allocate_dynamic_stack_space (size, NULL_RTX,
3995 TYPE_ALIGN (TREE_TYPE (decl)));
3997 /* Reference the variable indirectly through that rtx. */
3998 x = gen_rtx_MEM (DECL_MODE (decl), address);
3999 set_mem_attributes (x, decl, 1);
4000 SET_DECL_RTL (decl, x);
4003 /* Indicate the alignment we actually gave this variable. */
4004 #ifdef STACK_BOUNDARY
4005 DECL_ALIGN (decl) = STACK_BOUNDARY;
4006 #else
4007 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
4008 #endif
4009 DECL_USER_ALIGN (decl) = 0;
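/* A rough map, for illustration only, of where expand_decl puts
   automatic variables when optimizing (and without -ffloat-store):

       int i;           pseudo register, possibly in a promoted mode
       volatile int v;  stack slot, since TREE_THIS_VOLATILE
                        disables the register case
       struct s big;    stack slot via assign_temp (BLKmode)
       char a[n];       dynamic stack space from
                        allocate_dynamic_stack_space              */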
4013 /* Emit code to perform the initialization of a declaration DECL. */
4015 void
4016 expand_decl_init (decl)
4017 tree decl;
4019 int was_used = TREE_USED (decl);
4021 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
4022 for static decls. */
4023 if (TREE_CODE (decl) == CONST_DECL
4024 || TREE_STATIC (decl))
4025 return;
4027 /* Compute and store the initial value now. */
4029 if (DECL_INITIAL (decl) == error_mark_node)
4031 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
4033 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
4034 || code == POINTER_TYPE || code == REFERENCE_TYPE)
4035 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4036 0, 0);
4037 emit_queue ();
4039 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4041 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
4042 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
4043 emit_queue ();
4046 /* Don't let the initialization count as "using" the variable. */
4047 TREE_USED (decl) = was_used;
4049 /* Free any temporaries we made while initializing the decl. */
4050 preserve_temp_slots (NULL_RTX);
4051 free_temp_slots ();
4054 /* CLEANUP is an expression to be executed at exit from this binding contour;
4055 for example, in C++, it might call the destructor for this variable.
4057 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4058 CLEANUP multiple times, and have the correct semantics. This
4059 happens in exception handling, and for gotos, returns, and breaks that
4060 leave the current scope.
4062 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4063 that is not associated with any particular variable. */
4066 expand_decl_cleanup (decl, cleanup)
4067 tree decl, cleanup;
4069 struct nesting *thisblock;
4071 /* Error if we are not in any block. */
4072 if (cfun == 0 || block_stack == 0)
4073 return 0;
4075 thisblock = block_stack;
4077 /* Record the cleanup if there is one. */
4079 if (cleanup != 0)
4081 tree t;
4082 rtx seq;
4083 tree *cleanups = &thisblock->data.block.cleanups;
4084 int cond_context = conditional_context ();
4086 if (cond_context)
4088 rtx flag = gen_reg_rtx (word_mode);
4089 rtx set_flag_0;
4090 tree cond;
4092 start_sequence ();
4093 emit_move_insn (flag, const0_rtx);
4094 set_flag_0 = get_insns ();
4095 end_sequence ();
4097 thisblock->data.block.last_unconditional_cleanup
4098 = emit_insn_after (set_flag_0,
4099 thisblock->data.block.last_unconditional_cleanup);
4101 emit_move_insn (flag, const1_rtx);
4103 cond = build_decl (VAR_DECL, NULL_TREE,
4104 (*lang_hooks.types.type_for_mode) (word_mode, 1));
4105 SET_DECL_RTL (cond, flag);
4107 /* Conditionalize the cleanup. */
4108 cleanup = build (COND_EXPR, void_type_node,
4109 (*lang_hooks.truthvalue_conversion) (cond),
4110 cleanup, integer_zero_node);
4111 cleanup = fold (cleanup);
4113 cleanups = &thisblock->data.block.cleanups;
4116 cleanup = unsave_expr (cleanup);
4118 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4120 if (! cond_context)
4121 /* If this block has a cleanup, it belongs in stack_block_stack. */
4122 stack_block_stack = thisblock;
4124 if (cond_context)
4126 start_sequence ();
4129 if (! using_eh_for_cleanups_p)
4130 TREE_ADDRESSABLE (t) = 1;
4131 else
4132 expand_eh_region_start ();
4134 if (cond_context)
4136 seq = get_insns ();
4137 end_sequence ();
4138 if (seq)
4139 thisblock->data.block.last_unconditional_cleanup
4140 = emit_insn_after (seq,
4141 thisblock->data.block.last_unconditional_cleanup);
4143 else
4145 thisblock->data.block.last_unconditional_cleanup
4146 = get_last_insn ();
4147 /* When we insert instructions after the last unconditional cleanup,
4148 we don't adjust last_insn. That means that a later add_insn will
4149 clobber the instructions we've just added. The easiest way to
4150 fix this is to just insert another instruction here, so that the
4151 instructions inserted after the last unconditional cleanup are
4152 never the last instruction. */
4153 emit_note (NULL, NOTE_INSN_DELETED);
4156 return 1;
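/* Typical front-end usage, sketched with a hypothetical
   DESTRUCTOR_CALL tree (this file defines no such node):

       expand_decl (decl);
       expand_decl_init (decl);
       expand_decl_cleanup (decl, destructor_call);

   In a conditional context the code above instead registers roughly
   `flag ? destructor_call : 0', where FLAG is a fresh pseudo zeroed
   at the last unconditional point and set to 1 on the conditional
   path where the object is constructed, so the cleanup runs only
   when needed.  */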
4159 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
4160 is thrown. */
4163 expand_decl_cleanup_eh (decl, cleanup, eh_only)
4164 tree decl, cleanup;
4165 int eh_only;
4167 int ret = expand_decl_cleanup (decl, cleanup);
4168 if (cleanup && ret)
4170 tree node = block_stack->data.block.cleanups;
4171 CLEANUP_EH_ONLY (node) = eh_only;
4173 return ret;
4176 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4177 DECL_ELTS is the list of elements that belong to DECL's type.
4178 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4180 void
4181 expand_anon_union_decl (decl, cleanup, decl_elts)
4182 tree decl, cleanup, decl_elts;
4184 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4185 rtx x;
4186 tree t;
4188 /* If any of the elements are addressable, so is the entire union. */
4189 for (t = decl_elts; t; t = TREE_CHAIN (t))
4190 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4192 TREE_ADDRESSABLE (decl) = 1;
4193 break;
4196 expand_decl (decl);
4197 expand_decl_cleanup (decl, cleanup);
4198 x = DECL_RTL (decl);
4200 /* Go through the elements, assigning RTL to each. */
4201 for (t = decl_elts; t; t = TREE_CHAIN (t))
4203 tree decl_elt = TREE_VALUE (t);
4204 tree cleanup_elt = TREE_PURPOSE (t);
4205 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4207 /* If any of the elements are used, so is the entire
4208 union. */
4209 if (TREE_USED (decl_elt))
4210 TREE_USED (decl) = 1;
4212 /* Propagate the union's alignment to the elements. */
4213 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4214 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4216 /* If the element has BLKmode and the union doesn't, the union is
4217 aligned such that the element doesn't need to have BLKmode, so
4218 change the element's mode to the appropriate one for its size. */
4219 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4220 DECL_MODE (decl_elt) = mode
4221 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4223 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4224 instead create a new MEM rtx with the proper mode. */
4225 if (GET_CODE (x) == MEM)
4227 if (mode == GET_MODE (x))
4228 SET_DECL_RTL (decl_elt, x);
4229 else
4230 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4232 else if (GET_CODE (x) == REG)
4234 if (mode == GET_MODE (x))
4235 SET_DECL_RTL (decl_elt, x);
4236 else
4237 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4239 else
4240 abort ();
4242 /* Record the cleanup if there is one. */
4244 if (cleanup != 0)
4245 thisblock->data.block.cleanups
4246 = tree_cons (decl_elt, cleanup_elt,
4247 thisblock->data.block.cleanups);
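/* Illustration: for the C++ fragment

       void h () { union { int i; float f; }; i = 3; }

   the front end calls expand_anon_union_decl with the anonymous
   union's VAR_DECL and a DECL_ELTS list holding `i' and `f'; the
   loop above gives each element the union's MEM or REG, narrowed to
   the element's own machine mode.  */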
4251 /* Expand a list of cleanups LIST.
4252 Elements may be expressions or may be nested lists.
4254 If DONT_DO is nonnull, then any list-element
4255 whose TREE_PURPOSE matches DONT_DO is omitted.
4256 This is sometimes used to avoid a cleanup associated with
4257 a value that is being returned out of the scope.
4259 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
4260 goto and handle protection regions specially in that case.
4262 If REACHABLE, we emit code, otherwise just inform the exception handling
4263 code about this finalization. */
4265 static void
4266 expand_cleanups (list, dont_do, in_fixup, reachable)
4267 tree list;
4268 tree dont_do;
4269 int in_fixup;
4270 int reachable;
4272 tree tail;
4273 for (tail = list; tail; tail = TREE_CHAIN (tail))
4274 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4276 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4277 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4278 else
4280 if (! in_fixup && using_eh_for_cleanups_p)
4281 expand_eh_region_end_cleanup (TREE_VALUE (tail));
4283 if (reachable && !CLEANUP_EH_ONLY (tail))
4285 /* Cleanups may be run multiple times. For example,
4286 when exiting a binding contour, we expand the
4287 cleanups associated with that contour. When a goto
4288 within that binding contour has a target outside that
4289 contour, it will expand all cleanups from its scope to
4290 the target. Though the cleanups are expanded multiple
4291 times, the control paths are non-overlapping so the
4292 cleanups will not be executed twice. */
4294 /* We may need to protect from outer cleanups. */
4295 if (in_fixup && using_eh_for_cleanups_p)
4297 expand_eh_region_start ();
4299 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4301 expand_eh_region_end_fixup (TREE_VALUE (tail));
4303 else
4304 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4306 free_temp_slots ();
4312 /* Mark the context in which we are emitting RTL as a conditional
4313 context, so that any cleanup actions we register with
4314 expand_decl_cleanup will be properly conditionalized when those
4315 cleanup actions are later performed. Must be called before any
4316 expression (tree) is expanded that is within a conditional context. */
4318 void
4319 start_cleanup_deferral ()
4321 /* block_stack can be NULL if we are inside the parameter list. It is
4322 OK to do nothing, because cleanups aren't possible here. */
4323 if (block_stack)
4324 ++block_stack->data.block.conditional_code;
4327 /* Mark the end of a conditional region of code. Because cleanup
4328 deferrals may be nested, we may still be in a conditional region
4329 after we end the currently deferred cleanups; only after we end all
4330 deferred cleanups are we back in unconditional code. */
4332 void
4333 end_cleanup_deferral ()
4335 /* block_stack can be NULL if we are inside the parameter list. It is
4336 OK to do nothing, because cleanups aren't possible here. */
4337 if (block_stack)
4338 --block_stack->data.block.conditional_code;
4341 tree
4342 last_cleanup_this_contour ()
4344 if (block_stack == 0)
4345 return 0;
4347 return block_stack->data.block.cleanups;
4350 /* Return 1 if there are any pending cleanups at this point.
4351 If THIS_CONTOUR is nonzero, check the current contour as well.
4352 Otherwise, look only at the contours that enclose this one. */
4355 any_pending_cleanups (this_contour)
4356 int this_contour;
4358 struct nesting *block;
4360 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4361 return 0;
4363 if (this_contour && block_stack->data.block.cleanups != NULL)
4364 return 1;
4365 if (block_stack->data.block.cleanups == 0
4366 && block_stack->data.block.outer_cleanups == 0)
4367 return 0;
4369 for (block = block_stack->next; block; block = block->next)
4370 if (block->data.block.cleanups != 0)
4371 return 1;
4373 return 0;
4376 /* Enter a case (Pascal) or switch (C) statement.
4377 Push a block onto case_stack and nesting_stack
4378 to accumulate the case-labels that are seen
4379 and to record the labels generated for the statement.
4381 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4382 Otherwise, this construct is transparent for `exit_something'.
4384 EXPR is the index-expression to be dispatched on.
4385 TYPE is its nominal type. We could simply convert EXPR to this type,
4386 but instead we take short cuts. */
4388 void
4389 expand_start_case (exit_flag, expr, type, printname)
4390 int exit_flag;
4391 tree expr;
4392 tree type;
4393 const char *printname;
4395 struct nesting *thiscase = ALLOC_NESTING ();
4397 /* Make an entry on case_stack for the case we are entering. */
4399 thiscase->desc = CASE_NESTING;
4400 thiscase->next = case_stack;
4401 thiscase->all = nesting_stack;
4402 thiscase->depth = ++nesting_depth;
4403 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4404 thiscase->data.case_stmt.case_list = 0;
4405 thiscase->data.case_stmt.index_expr = expr;
4406 thiscase->data.case_stmt.nominal_type = type;
4407 thiscase->data.case_stmt.default_label = 0;
4408 thiscase->data.case_stmt.printname = printname;
4409 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4410 case_stack = thiscase;
4411 nesting_stack = thiscase;
4413 do_pending_stack_adjust ();
4415 /* Make sure case_stmt.start points to something that won't
4416 need any transformation before expand_end_case. */
4417 if (GET_CODE (get_last_insn ()) != NOTE)
4418 emit_note (NULL, NOTE_INSN_DELETED);
4420 thiscase->data.case_stmt.start = get_last_insn ();
4422 start_cleanup_deferral ();
4425 /* Start a "dummy case statement" within which case labels are invalid
4426 and are not connected to any larger real case statement.
4427 This can be used if you don't want to let a case statement jump
4428 into the middle of certain kinds of constructs. */
4430 void
4431 expand_start_case_dummy ()
4433 struct nesting *thiscase = ALLOC_NESTING ();
4435 /* Make an entry on case_stack for the dummy. */
4437 thiscase->desc = CASE_NESTING;
4438 thiscase->next = case_stack;
4439 thiscase->all = nesting_stack;
4440 thiscase->depth = ++nesting_depth;
4441 thiscase->exit_label = 0;
4442 thiscase->data.case_stmt.case_list = 0;
4443 thiscase->data.case_stmt.start = 0;
4444 thiscase->data.case_stmt.nominal_type = 0;
4445 thiscase->data.case_stmt.default_label = 0;
4446 case_stack = thiscase;
4447 nesting_stack = thiscase;
4448 start_cleanup_deferral ();
4451 static void
4452 check_seenlabel ()
4454 /* If this is the first label, warn if any insns have been emitted. */
4455 if (case_stack->data.case_stmt.line_number_status >= 0)
4457 rtx insn;
4459 restore_line_number_status
4460 (case_stack->data.case_stmt.line_number_status);
4461 case_stack->data.case_stmt.line_number_status = -1;
4463 for (insn = case_stack->data.case_stmt.start;
4464 insn;
4465 insn = NEXT_INSN (insn))
4467 if (GET_CODE (insn) == CODE_LABEL)
4468 break;
4469 if (GET_CODE (insn) != NOTE
4470 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4473 insn = PREV_INSN (insn);
4474 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4476 /* If insn is zero, then there must have been a syntax error. */
4477 if (insn)
4478 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4479 NOTE_LINE_NUMBER (insn),
4480 "unreachable code at beginning of %s",
4481 case_stack->data.case_stmt.printname);
4482 break;
4488 /* Accumulate one case or default label inside a case or switch statement.
4489 VALUE is the value of the case (a null pointer, for a default label).
4490 The function CONVERTER, when applied to arguments T and V,
4491 converts the value V to the type T.
4493 If not currently inside a case or switch statement, return 1 and do
4494 nothing. The caller will print a language-specific error message.
4495 If VALUE is a duplicate or overlaps, return 2 and do nothing
4496 except store the (first) duplicate node in *DUPLICATE.
4497 If VALUE is out of range, return 3 and do nothing.
4498 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4499 Return 0 on success.
4501 Extended to handle range statements. */
4504 pushcase (value, converter, label, duplicate)
4505 tree value;
4506 tree (*converter) PARAMS ((tree, tree));
4507 tree label;
4508 tree *duplicate;
4510 tree index_type;
4511 tree nominal_type;
4513 /* Fail if not inside a real case statement. */
4514 if (! (case_stack && case_stack->data.case_stmt.start))
4515 return 1;
4517 if (stack_block_stack
4518 && stack_block_stack->depth > case_stack->depth)
4519 return 5;
4521 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4522 nominal_type = case_stack->data.case_stmt.nominal_type;
4524 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4525 if (index_type == error_mark_node)
4526 return 0;
4528 /* Convert VALUE to the type in which the comparisons are nominally done. */
4529 if (value != 0)
4530 value = (*converter) (nominal_type, value);
4532 check_seenlabel ();
4534 /* Fail if this value is out of range for the actual type of the index
4535 (which may be narrower than NOMINAL_TYPE). */
4536 if (value != 0
4537 && (TREE_CONSTANT_OVERFLOW (value)
4538 || ! int_fits_type_p (value, index_type)))
4539 return 3;
4541 return add_case_node (value, value, label, duplicate);
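/* A caller might check the return codes documented above like so
   (sketch only; the real front ends word their diagnostics
   differently):

       tree duplicate;
       int code = pushcase (value, convert, label, &duplicate);

       if (code == 1)
         error ("case label not within a switch statement");
       else if (code == 2)
         error_with_decl (duplicate, "duplicate case value");
       else if (code == 3)
         error ("case value out of range");
       else if (code == 5)
         error ("case label within scope of cleanup or variable array");
   */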
4544 /* Like pushcase but this case applies to all values between VALUE1 and
4545 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4546 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4547 starts at VALUE1 and ends at the highest value of the index type.
4548 If both are NULL, this case applies to all values.
4550 The return value is the same as that of pushcase but there is one
4551 additional error code: 4 means the specified range was empty. */
4554 pushcase_range (value1, value2, converter, label, duplicate)
4555 tree value1, value2;
4556 tree (*converter) PARAMS ((tree, tree));
4557 tree label;
4558 tree *duplicate;
4560 tree index_type;
4561 tree nominal_type;
4563 /* Fail if not inside a real case statement. */
4564 if (! (case_stack && case_stack->data.case_stmt.start))
4565 return 1;
4567 if (stack_block_stack
4568 && stack_block_stack->depth > case_stack->depth)
4569 return 5;
4571 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4572 nominal_type = case_stack->data.case_stmt.nominal_type;
4574 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4575 if (index_type == error_mark_node)
4576 return 0;
4578 check_seenlabel ();
4580 /* Convert VALUEs to the type in which the comparisons are nominally done
4581 and replace any unspecified value with the corresponding bound. */
4582 if (value1 == 0)
4583 value1 = TYPE_MIN_VALUE (index_type);
4584 if (value2 == 0)
4585 value2 = TYPE_MAX_VALUE (index_type);
4587 /* Fail if the range is empty. Do this before any conversion since
4588 we want to allow out-of-range empty ranges. */
4589 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4590 return 4;
4592 /* If the max was unbounded, use the max of the nominal_type we are
4593 converting to. Do this after the < check above to suppress false
4594 positives. */
4595 if (value2 == 0)
4596 value2 = TYPE_MAX_VALUE (nominal_type);
4598 value1 = (*converter) (nominal_type, value1);
4599 value2 = (*converter) (nominal_type, value2);
4601 /* Fail if these values are out of range. */
4602 if (TREE_CONSTANT_OVERFLOW (value1)
4603 || ! int_fits_type_p (value1, index_type))
4604 return 3;
4606 if (TREE_CONSTANT_OVERFLOW (value2)
4607 || ! int_fits_type_p (value2, index_type))
4608 return 3;
4610 return add_case_node (value1, value2, label, duplicate);
4613 /* Do the actual insertion of a case label for pushcase and pushcase_range
4614 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4615 slowdown for large switch statements. */
4618 add_case_node (low, high, label, duplicate)
4619 tree low, high;
4620 tree label;
4621 tree *duplicate;
4623 struct case_node *p, **q, *r;
4625 /* If there's no HIGH value, then this is not a case range; it's
4626 just a simple case label. But that's just a degenerate case
4627 range. */
4628 if (!high)
4629 high = low;
4631 /* Handle default labels specially. */
4632 if (!high && !low)
4634 if (case_stack->data.case_stmt.default_label != 0)
4636 *duplicate = case_stack->data.case_stmt.default_label;
4637 return 2;
4639 case_stack->data.case_stmt.default_label = label;
4640 expand_label (label);
4641 return 0;
4644 q = &case_stack->data.case_stmt.case_list;
4645 p = *q;
4647 while ((r = *q))
4649 p = r;
4651 /* Keep going past elements distinctly greater than HIGH. */
4652 if (tree_int_cst_lt (high, p->low))
4653 q = &p->left;
4655 /* or distinctly less than LOW. */
4656 else if (tree_int_cst_lt (p->high, low))
4657 q = &p->right;
4659 else
4661 /* We have an overlap; this is an error. */
4662 *duplicate = p->code_label;
4663 return 2;
4667 /* Add this label to the chain, and succeed. */
4669 r = (struct case_node *) ggc_alloc (sizeof (struct case_node));
4670 r->low = low;
4672 /* If the bounds are equal, turn this into the one-value case. */
4673 if (tree_int_cst_equal (low, high))
4674 r->high = r->low;
4675 else
4676 r->high = high;
4678 r->code_label = label;
4679 expand_label (label);
4681 *q = r;
4682 r->parent = p;
4683 r->left = 0;
4684 r->right = 0;
4685 r->balance = 0;
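/* The loop below walks back toward the root updating balance
   factors, rotating wherever one side of a node has become two
   levels deeper than the other.  For illustration, the plain
   R-rotation used when P and its left child R both lean left:

           p (-2)              r (0)
          /      \            /     \
        r (-1)    c    ==>   a       p (0)
       /   \                        /   \
      a     s                      s     c

   The LR and RL cases below rotate twice, through R's child, to
   handle the zig-zag shapes.  */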
4687 while (p)
4689 struct case_node *s;
4691 if (r == p->left)
4693 int b;
4695 if (! (b = p->balance))
4696 /* Growth propagation from left side. */
4697 p->balance = -1;
4698 else if (b < 0)
4700 if (r->balance < 0)
4702 /* R-Rotation */
4703 if ((p->left = s = r->right))
4704 s->parent = p;
4706 r->right = p;
4707 p->balance = 0;
4708 r->balance = 0;
4709 s = p->parent;
4710 p->parent = r;
4712 if ((r->parent = s))
4714 if (s->left == p)
4715 s->left = r;
4716 else
4717 s->right = r;
4719 else
4720 case_stack->data.case_stmt.case_list = r;
4722 else
4723 /* r->balance == +1 */
4725 /* LR-Rotation */
4727 int b2;
4728 struct case_node *t = r->right;
4730 if ((p->left = s = t->right))
4731 s->parent = p;
4733 t->right = p;
4734 if ((r->right = s = t->left))
4735 s->parent = r;
4737 t->left = r;
4738 b = t->balance;
4739 b2 = b < 0;
4740 p->balance = b2;
4741 b2 = -b2 - b;
4742 r->balance = b2;
4743 t->balance = 0;
4744 s = p->parent;
4745 p->parent = t;
4746 r->parent = t;
4748 if ((t->parent = s))
4750 if (s->left == p)
4751 s->left = t;
4752 else
4753 s->right = t;
4755 else
4756 case_stack->data.case_stmt.case_list = t;
4758 break;
4761 else
4763 /* p->balance == +1; growth of left side balances the node. */
4764 p->balance = 0;
4765 break;
4768 else
4769 /* r == p->right */
4771 int b;
4773 if (! (b = p->balance))
4774 /* Growth propagation from right side. */
4775 p->balance++;
4776 else if (b > 0)
4778 if (r->balance > 0)
4780 /* L-Rotation */
4782 if ((p->right = s = r->left))
4783 s->parent = p;
4785 r->left = p;
4786 p->balance = 0;
4787 r->balance = 0;
4788 s = p->parent;
4789 p->parent = r;
4790 if ((r->parent = s))
4792 if (s->left == p)
4793 s->left = r;
4794 else
4795 s->right = r;
4798 else
4799 case_stack->data.case_stmt.case_list = r;
4802 else
4803 /* r->balance == -1 */
4805 /* RL-Rotation */
4806 int b2;
4807 struct case_node *t = r->left;
4809 if ((p->right = s = t->left))
4810 s->parent = p;
4812 t->left = p;
4814 if ((r->left = s = t->right))
4815 s->parent = r;
4817 t->right = r;
4818 b = t->balance;
4819 b2 = b < 0;
4820 r->balance = b2;
4821 b2 = -b2 - b;
4822 p->balance = b2;
4823 t->balance = 0;
4824 s = p->parent;
4825 p->parent = t;
4826 r->parent = t;
4828 if ((t->parent = s))
4830 if (s->left == p)
4831 s->left = t;
4832 else
4833 s->right = t;
4836 else
4837 case_stack->data.case_stmt.case_list = t;
4839 break;
4841 else
4843 /* p->balance == -1; growth of right side balances the node. */
4844 p->balance = 0;
4845 break;
4849 r = p;
4850 p = p->parent;
4853 return 0;
4856 /* Returns the number of possible values of TYPE.
4857 Returns -1 if the number is unknown, variable, or if the number does not
4858 fit in a HOST_WIDE_INT.
4859 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4860 do not increase monotonically (there may be duplicates);
4861 to 1 if the values increase monotonically, but not always by 1;
4862 otherwise sets it to 0. */
4864 HOST_WIDE_INT
4865 all_cases_count (type, sparseness)
4866 tree type;
4867 int *sparseness;
4869 tree t;
4870 HOST_WIDE_INT count, minval, lastval;
4872 *sparseness = 0;
4874 switch (TREE_CODE (type))
4876 case BOOLEAN_TYPE:
4877 count = 2;
4878 break;
4880 case CHAR_TYPE:
4881 count = 1 << BITS_PER_UNIT;
4882 break;
4884 default:
4885 case INTEGER_TYPE:
4886 if (TYPE_MAX_VALUE (type) != 0
4887 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4888 TYPE_MIN_VALUE (type))))
4889 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4890 convert (type, integer_zero_node))))
4891 && host_integerp (t, 1))
4892 count = tree_low_cst (t, 1);
4893 else
4894 return -1;
4895 break;
4897 case ENUMERAL_TYPE:
4898 /* Don't waste time with enumeral types with huge values. */
4899 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4900 || TYPE_MAX_VALUE (type) == 0
4901 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4902 return -1;
4904 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4905 count = 0;
4907 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4909 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4911 if (*sparseness == 2 || thisval <= lastval)
4912 *sparseness = 2;
4913 else if (thisval != minval + count)
4914 *sparseness = 1;
4916 lastval = thisval;
4917 count++;
4921 return count;
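/* Illustrative results: for `enum e1 { A, B, C }' this returns 3 and
   sets *SPARSENESS to 0; for `enum e2 { A = 0, B = 4 }' it returns 2
   with *SPARSENESS 1 (monotonic, but with gaps); for
   `enum e3 { A = 1, B = 1 }' the values do not increase strictly, so
   *SPARSENESS becomes 2.  */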
4924 #define BITARRAY_TEST(ARRAY, INDEX) \
4925 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4926 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4927 #define BITARRAY_SET(ARRAY, INDEX) \
4928 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4929 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
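/* Usage sketch: with HOST_BITS_PER_CHAR == 8,
   BITARRAY_SET (cases_seen, 11) sets bit 3 of cases_seen[1], and
   BITARRAY_TEST (cases_seen, 11) is then nonzero.  */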
4931 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4932 with the case values we have seen, assuming the case expression
4933 has the given TYPE.
4934 SPARSENESS is as determined by all_cases_count.
4936 The time needed is proportional to COUNT, unless
4937 SPARSENESS is 2, in which case quadratic time is needed. */
4939 void
4940 mark_seen_cases (type, cases_seen, count, sparseness)
4941 tree type;
4942 unsigned char *cases_seen;
4943 HOST_WIDE_INT count;
4944 int sparseness;
4946 tree next_node_to_try = NULL_TREE;
4947 HOST_WIDE_INT next_node_offset = 0;
4949 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4950 tree val = make_node (INTEGER_CST);
4952 TREE_TYPE (val) = type;
4953 if (! root)
4954 /* Do nothing. */
4956 else if (sparseness == 2)
4958 tree t;
4959 unsigned HOST_WIDE_INT xlo;
4961 /* This less efficient loop is only needed to handle
4962 duplicate case values (multiple enum constants
4963 with the same value). */
4964 TREE_TYPE (val) = TREE_TYPE (root->low);
4965 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4966 t = TREE_CHAIN (t), xlo++)
4968 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4969 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4970 n = root;
4973 /* Keep going past elements distinctly greater than VAL. */
4974 if (tree_int_cst_lt (val, n->low))
4975 n = n->left;
4977 /* or distinctly less than VAL. */
4978 else if (tree_int_cst_lt (n->high, val))
4979 n = n->right;
4981 else
4983 /* We have found a matching range. */
4984 BITARRAY_SET (cases_seen, xlo);
4985 break;
4988 while (n);
4991 else
4993 if (root->left)
4994 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4996 for (n = root; n; n = n->right)
4998 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4999 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5000 while (! tree_int_cst_lt (n->high, val))
5002 /* Calculate (into xlo) the "offset" of the integer (val).
5003 The element with lowest value has offset 0, the next smallest
5004 element has offset 1, etc. */
5006 unsigned HOST_WIDE_INT xlo;
5007 HOST_WIDE_INT xhi;
5008 tree t;
5010 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5012 /* The TYPE_VALUES will be in increasing order, so
5013 start searching where we last ended. */
5014 t = next_node_to_try;
5015 xlo = next_node_offset;
5016 xhi = 0;
5017 for (;;)
5019 if (t == NULL_TREE)
5021 t = TYPE_VALUES (type);
5022 xlo = 0;
5024 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5026 next_node_to_try = TREE_CHAIN (t);
5027 next_node_offset = xlo + 1;
5028 break;
5030 xlo++;
5031 t = TREE_CHAIN (t);
5032 if (t == next_node_to_try)
5034 xlo = -1;
5035 break;
5039 else
5041 t = TYPE_MIN_VALUE (type);
5042 if (t)
5043 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5044 &xlo, &xhi);
5045 else
5046 xlo = xhi = 0;
5047 add_double (xlo, xhi,
5048 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5049 &xlo, &xhi);
5052 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5053 BITARRAY_SET (cases_seen, xlo);
5055 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5056 1, 0,
5057 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5063 /* Given a switch statement with an expression that is an enumeration
5064 type, warn if any of the enumeration type's literals are not
5065 covered by the case expressions of the switch. Also, warn if there
5066 are any extra switch cases that are *not* elements of the
5067 enumerated type.
5069 Historical note:
5071 At one stage this function would: ``If all enumeration literals
5072 were covered by the case expressions, turn one of the expressions
5073 into the default expression since it should not be possible to fall
5074 through such a switch.''
5076 That code has since been removed as: ``This optimization is
5077 disabled because it causes valid programs to fail. ANSI C does not
5078 guarantee that an expression with enum type will have a value that
5079 is the same as one of the enumeration literals.'' */
5081 void
5082 check_for_full_enumeration_handling (type)
5083 tree type;
5085 struct case_node *n;
5086 tree chain;
5088 /* Sparseness of the selector type, as computed by all_cases_count. */
5089 int sparseness = 0;
5091 /* The number of possible selector values. */
5092 HOST_WIDE_INT size;
5094 /* For each possible selector value, a one iff it has been matched
5095 by a case value alternative. */
5096 unsigned char *cases_seen;
5098 /* The allocated size of cases_seen, in chars. */
5099 HOST_WIDE_INT bytes_needed;
5101 size = all_cases_count (type, &sparseness);
5102 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5104 if (size > 0 && size < 600000
5105 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5106 this optimization if we don't have enough memory rather than
5107 aborting, as xmalloc would do. */
5108 && (cases_seen =
5109 (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
5111 HOST_WIDE_INT i;
5112 tree v = TYPE_VALUES (type);
5114 /* The time complexity of this code is normally O(N), where
5115 N is the number of members in the enumerated type.
5116 However, if type is an ENUMERAL_TYPE whose values do not
5117 increase monotonically, O(N*log(N)) time may be needed. */
5119 mark_seen_cases (type, cases_seen, size, sparseness);
5121 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5122 if (BITARRAY_TEST (cases_seen, i) == 0)
5123 warning ("enumeration value `%s' not handled in switch",
5124 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5126 free (cases_seen);
5129 /* Now we go the other way around; we warn if there are case
5130 expressions that don't correspond to enumerators. This can
5131 occur since C and C++ don't enforce type-checking of
5132 assignments to enumeration variables. */
5134 if (case_stack->data.case_stmt.case_list
5135 && case_stack->data.case_stmt.case_list->left)
5136 case_stack->data.case_stmt.case_list
5137 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5138 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5140 for (chain = TYPE_VALUES (type);
5141 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5142 chain = TREE_CHAIN (chain))
5145 if (!chain)
5147 if (TYPE_NAME (type) == 0)
5148 warning ("case value `%ld' not in enumerated type",
5149 (long) TREE_INT_CST_LOW (n->low));
5150 else
5151 warning ("case value `%ld' not in enumerated type `%s'",
5152 (long) TREE_INT_CST_LOW (n->low),
5153 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5154 == IDENTIFIER_NODE)
5155 ? TYPE_NAME (type)
5156 : DECL_NAME (TYPE_NAME (type))));
5158 if (!tree_int_cst_equal (n->low, n->high))
5160 for (chain = TYPE_VALUES (type);
5161 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5162 chain = TREE_CHAIN (chain))
5165 if (!chain)
5167 if (TYPE_NAME (type) == 0)
5168 warning ("case value `%ld' not in enumerated type",
5169 (long) TREE_INT_CST_LOW (n->high));
5170 else
5171 warning ("case value `%ld' not in enumerated type `%s'",
5172 (long) TREE_INT_CST_LOW (n->high),
5173 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5174 == IDENTIFIER_NODE)
5175 ? TYPE_NAME (type)
5176 : DECL_NAME (TYPE_NAME (type))));
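/* Illustration: given `enum e { A, B, C }' and a switch over an
   `enum e' value whose only labels are `case A:' and `case 7:', the
   first loop above warns that `B' and `C' are not handled in the
   switch, and the second loop warns that case value 7 is not in the
   enumerated type.  */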
5184 /* Terminate a case (Pascal) or switch (C) statement
5185 in which ORIG_INDEX is the expression to be tested.
5186 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
5187 type as given in the source before any compiler conversions.
5188 Generate the code to test it and jump to the right place. */
5190 void
5191 expand_end_case_type (orig_index, orig_type)
5192 tree orig_index, orig_type;
5194 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5195 rtx default_label = 0;
5196 struct case_node *n;
5197 unsigned int count;
5198 rtx index;
5199 rtx table_label;
5200 int ncases;
5201 rtx *labelvec;
5202 int i;
5203 rtx before_case, end;
5204 struct nesting *thiscase = case_stack;
5205 tree index_expr, index_type;
5206 bool exit_done = false;
5207 int unsignedp;
5209 /* Don't crash due to previous errors. */
5210 if (thiscase == NULL)
5211 return;
5213 index_expr = thiscase->data.case_stmt.index_expr;
5214 index_type = TREE_TYPE (index_expr);
5215 unsignedp = TREE_UNSIGNED (index_type);
5216 if (orig_type == NULL)
5217 orig_type = TREE_TYPE (orig_index);
5219 do_pending_stack_adjust ();
5221 /* This might get a spurious warning in the presence of a syntax error;
5222 it could be fixed by moving the call to check_seenlabel after the
5223 check for error_mark_node, and copying the code of check_seenlabel that
5224 deals with case_stack->data.case_stmt.line_number_status /
5225 restore_line_number_status in front of the call to end_cleanup_deferral.
5226 However, this might miss some useful warnings in the presence of
5227 non-syntax errors. */
5228 check_seenlabel ();
5230 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5231 if (index_type != error_mark_node)
5233 /* If the switch expression was an enumerated type, check that
5234 all enumeration literals are covered by the cases.
5235 The check is made when -Wswitch was specified and there is no
5236 default case, or when -Wswitch-enum was specified. */
5237 if (((warn_switch && !thiscase->data.case_stmt.default_label)
5238 || warn_switch_enum)
5239 && TREE_CODE (orig_type) == ENUMERAL_TYPE
5240 && TREE_CODE (index_expr) != INTEGER_CST)
5241 check_for_full_enumeration_handling (orig_type);
5243 if (warn_switch_default && !thiscase->data.case_stmt.default_label)
5244 warning ("switch missing default case");
5246 /* If we don't have a default-label, create one here,
5247 after the body of the switch. */
5248 if (thiscase->data.case_stmt.default_label == 0)
5250 thiscase->data.case_stmt.default_label
5251 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5252 /* Share the exit label if possible. */
5253 if (thiscase->exit_label)
5255 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
5256 thiscase->exit_label);
5257 exit_done = true;
5259 expand_label (thiscase->data.case_stmt.default_label);
5261 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5263 before_case = get_last_insn ();
5265 if (thiscase->data.case_stmt.case_list
5266 && thiscase->data.case_stmt.case_list->left)
5267 thiscase->data.case_stmt.case_list
5268 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5270 /* Simplify the case-list before we count it. */
5271 group_case_nodes (thiscase->data.case_stmt.case_list);
5272 strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
5273 default_label);
5275 /* Get upper and lower bounds of case values.
5276 Also convert all the case values to the index expr's data type. */
5278 count = 0;
5279 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5281 /* Check low and high label values are integers. */
5282 if (TREE_CODE (n->low) != INTEGER_CST)
5283 abort ();
5284 if (TREE_CODE (n->high) != INTEGER_CST)
5285 abort ();
5287 n->low = convert (index_type, n->low);
5288 n->high = convert (index_type, n->high);
5290 /* Count the elements and track the largest and smallest
5291 of them (treating them as signed even if they are not). */
5292 if (count++ == 0)
5294 minval = n->low;
5295 maxval = n->high;
5297 else
5299 if (INT_CST_LT (n->low, minval))
5300 minval = n->low;
5301 if (INT_CST_LT (maxval, n->high))
5302 maxval = n->high;
5304 /* A range counts double, since it requires two compares. */
5305 if (! tree_int_cst_equal (n->low, n->high))
5306 count++;
5309 /* Compute span of values. */
5310 if (count != 0)
5311 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5313 end_cleanup_deferral ();
5315 if (count == 0)
5317 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5318 emit_queue ();
5319 emit_jump (default_label);
5322 /* If range of values is much bigger than number of values,
5323 make a sequence of conditional branches instead of a dispatch.
5324 If the switch-index is a constant, do it this way
5325 because we can optimize it. */
5327 else if (count < case_values_threshold ()
5328 || compare_tree_int (range, 10 * count) > 0
5329 /* RANGE may be signed, and really large ranges will show up
5330 as negative numbers. */
5331 || compare_tree_int (range, 0) < 0
5332 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5333 || flag_pic
5334 #endif
5335 || TREE_CONSTANT (index_expr))
5337 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5339 /* If the index is a short or char for which we do not have
5340 an insn to handle comparisons directly, convert it to
5341 a full integer now, rather than letting each comparison
5342 generate the conversion. */
5344 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5345 && ! have_insn_for (COMPARE, GET_MODE (index)))
5347 enum machine_mode wider_mode;
5348 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5349 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5350 if (have_insn_for (COMPARE, wider_mode))
5352 index = convert_to_mode (wider_mode, index, unsignedp);
5353 break;
5357 emit_queue ();
5358 do_pending_stack_adjust ();
5360 index = protect_from_queue (index, 0);
5361 if (GET_CODE (index) == MEM)
5362 index = copy_to_reg (index);
5363 if (GET_CODE (index) == CONST_INT
5364 || TREE_CODE (index_expr) == INTEGER_CST)
5366 /* Make a tree node with the proper constant value
5367 if we don't already have one. */
5368 if (TREE_CODE (index_expr) != INTEGER_CST)
5370 index_expr
5371 = build_int_2 (INTVAL (index),
5372 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5373 index_expr = convert (index_type, index_expr);
5376 /* For constant index expressions we need only
5377 issue an unconditional branch to the appropriate
5378 target code. The job of removing any unreachable
5379 code is left to the optimization phase if the
5380 "-O" option is specified. */
5381 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5382 if (! tree_int_cst_lt (index_expr, n->low)
5383 && ! tree_int_cst_lt (n->high, index_expr))
5384 break;
5386 if (n)
5387 emit_jump (label_rtx (n->code_label));
5388 else
5389 emit_jump (default_label);
5391 else
5393 /* If the index expression is not constant we generate
5394 a binary decision tree to select the appropriate
5395 target code. This is done as follows:
5397 The list of cases is rearranged into a binary tree,
5398 nearly optimal assuming equal probability for each case.
5400 The tree is transformed into RTL, eliminating
5401 redundant test conditions at the same time.
5403 If program flow could reach the end of the
5404 decision tree an unconditional jump to the
5405 default code is emitted. */
5407 use_cost_table
5408 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
5409 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5410 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5411 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5412 default_label, index_type);
5413 emit_jump_if_reachable (default_label);
5416 else
5418 table_label = gen_label_rtx ();
5419 if (! try_casesi (index_type, index_expr, minval, range,
5420 table_label, default_label))
5422 index_type = thiscase->data.case_stmt.nominal_type;
5424 /* Index jumptables from zero for suitable values of
5425 minval to avoid a subtraction. */
5426 if (! optimize_size
5427 && compare_tree_int (minval, 0) > 0
5428 && compare_tree_int (minval, 3) < 0)
5430 minval = integer_zero_node;
5431 range = maxval;
5434 if (! try_tablejump (index_type, index_expr, minval, range,
5435 table_label, default_label))
5436 abort ();
5439 /* Get table of labels to jump to, in order of case index. */
5441 ncases = tree_low_cst (range, 0) + 1;
5442 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5443 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5445 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5447 /* Compute the low and high bounds relative to the minimum
5448 value since that should fit in a HOST_WIDE_INT while the
5449 actual values may not. */
5450 HOST_WIDE_INT i_low
5451 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5452 n->low, minval)), 1);
5453 HOST_WIDE_INT i_high
5454 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5455 n->high, minval)), 1);
5456 HOST_WIDE_INT i;
5458 for (i = i_low; i <= i_high; i ++)
5459 labelvec[i]
5460 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5463 /* Fill in the gaps with the default. */
5464 for (i = 0; i < ncases; i++)
5465 if (labelvec[i] == 0)
5466 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5468 /* Output the table */
5469 emit_label (table_label);
5471 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5472 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5473 gen_rtx_LABEL_REF (Pmode, table_label),
5474 gen_rtvec_v (ncases, labelvec),
5475 const0_rtx, const0_rtx));
5476 else
5477 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5478 gen_rtvec_v (ncases, labelvec)));
5480 /* If the case insn drops through the table,
5481 after the table we must jump to the default-label.
5482 Otherwise record no drop-through after the table. */
5483 #ifdef CASE_DROPS_THROUGH
5484 emit_jump (default_label);
5485 #else
5486 emit_barrier ();
5487 #endif
5490 before_case = NEXT_INSN (before_case);
5491 end = get_last_insn ();
5492 if (squeeze_notes (&before_case, &end))
5493 abort ();
5494 reorder_insns (before_case, end,
5495 thiscase->data.case_stmt.start);
5497 else
5498 end_cleanup_deferral ();
5500 if (thiscase->exit_label && !exit_done)
5501 emit_label (thiscase->exit_label);
5503 POPSTACK (case_stack);
5505 free_temp_slots ();
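/* Two sketches of the dispatch decision above, assuming COUNT is at
   or above the target's case_values_threshold: six consecutive cases
   1..6 give RANGE == 5, well under 10 * COUNT, so a branch table is
   attempted through try_casesi or try_tablejump; cases 1, 100, 200,
   300, 400 and 500 give RANGE == 499 > 10 * COUNT == 60, so a
   balanced tree of compare-and-jump insns is emitted instead.  */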
5508 /* Convert the tree NODE into a list linked by the right field, with the left
5509 field zeroed. RIGHT is used for recursion; it is a list to be placed
5510 rightmost in the resulting list. */
5512 static struct case_node *
5513 case_tree2list (node, right)
5514 struct case_node *node, *right;
5516 struct case_node *left;
5518 if (node->right)
5519 right = case_tree2list (node->right, right);
5521 node->right = right;
5522 if ((left = node->left))
5524 node->left = 0;
5525 return case_tree2list (left, node);
5528 return node;
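/* E.g. a tree whose root has case value 5, left child 3 and right
   child 8 flattens to the ordered list 3 -> 5 -> 8, with every LEFT
   field zeroed.  */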
5531 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5533 static void
5534 do_jump_if_equal (op1, op2, label, unsignedp)
5535 rtx op1, op2, label;
5536 int unsignedp;
5538 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5540 if (INTVAL (op1) == INTVAL (op2))
5541 emit_jump (label);
5543 else
5544 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5545 (GET_MODE (op1) == VOIDmode
5546 ? GET_MODE (op2) : GET_MODE (op1)),
5547 unsignedp, label);
5550 /* Not all case values are encountered equally. This function
5551 uses a heuristic to weight case labels, in cases where that
5552 looks like a reasonable thing to do.
5554 Right now, all we try to guess is text, and we establish the
5555 following weights:
5557 chars above space: 16
5558 digits: 16
5559 default: 12
5560 space, punct: 8
5561 tab: 4
5562 newline: 2
5563 other "\" chars: 1
5564 remaining chars: 0
5566 If we find any cases in the switch that are not either -1 or in the range
5567 of valid ASCII characters, or are control characters other than those
5568 commonly used with "\", don't treat this switch as scanning text.
5570 Return 1 if these nodes are suitable for cost estimation, otherwise
5571 return 0. */
5573 static int
5574 estimate_case_costs (node)
5575 case_node_ptr node;
5577 tree min_ascii = integer_minus_one_node;
5578 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5579 case_node_ptr n;
5580 int i;
5582 /* If we haven't already made the cost table, make it now. Note that the
5583 lower bound of the table is -1, not zero. */
5585 if (! cost_table_initialized)
5587 cost_table_initialized = 1;
5589 for (i = 0; i < 128; i++)
5591 if (ISALNUM (i))
5592 COST_TABLE (i) = 16;
5593 else if (ISPUNCT (i))
5594 COST_TABLE (i) = 8;
5595 else if (ISCNTRL (i))
5596 COST_TABLE (i) = -1;
5599 COST_TABLE (' ') = 8;
5600 COST_TABLE ('\t') = 4;
5601 COST_TABLE ('\0') = 4;
5602 COST_TABLE ('\n') = 2;
5603 COST_TABLE ('\f') = 1;
5604 COST_TABLE ('\v') = 1;
5605 COST_TABLE ('\b') = 1;
5608 /* See if all the case expressions look like text. It is text if every
5609 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5610 as signed arithmetic since we don't want to ever access cost_table with a
5611 value less than -1. Also check that none of the constants in a range
5612 are strange control characters. */
5614 for (n = node; n; n = n->right)
5616 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5617 return 0;
5619 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5620 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5621 if (COST_TABLE (i) < 0)
5622 return 0;
5625 /* All interesting values are within the range of interesting
5626 ASCII characters. */
5627 return 1;
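/* Illustration (an editor's sketch with hypothetical case values): for
   the labels 'a', ' ' and '\n', every value lies in [-1, 127] and none
   lands on a -1 cost entry, so the weights 16, 8 and 2 apply and the
   function returns 1. A case label of 200, or a control character such
   as '\001', makes the switch look like non-text, and 0 is returned. */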
5630 /* Determine whether two case labels branch to the same target. */
5632 static bool
5633 same_case_target_p (l1, l2)
5634 rtx l1, l2;
5636 rtx i1, i2;
5638 if (l1 == l2)
5639 return true;
5641 i1 = next_real_insn (l1);
5642 i2 = next_real_insn (l2);
5643 if (i1 == i2)
5644 return true;
5646 if (i1 && simplejump_p (i1))
5648 l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
5651 if (i2 && simplejump_p (i2))
5653 l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
5655 return l1 == l2;
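/* Illustration (an editor's sketch with hypothetical labels): given

	L1: goto L3;
	L2: goto L3;

   the first real insn after each of L1 and L2 is a simple jump whose
   target is L3, so same_case_target_p holds for L1 and L2 even though
   the labels themselves differ. */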
5658 /* Delete nodes that branch to the default label from a list of
5659 case nodes. E.g. case 5: default: becomes just default: */
5661 static void
5662 strip_default_case_nodes (prev, deflab)
5663 case_node_ptr *prev;
5664 rtx deflab;
5666 case_node_ptr ptr;
5668 while (*prev)
5670 ptr = *prev;
5671 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
5672 *prev = ptr->right;
5673 else
5674 prev = &ptr->right;
5678 /* Scan an ordered list of case nodes
5679 combining those with consecutive values or ranges.
5681 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
5683 static void
5684 group_case_nodes (head)
5685 case_node_ptr head;
5687 case_node_ptr node = head;
5689 while (node)
5691 rtx lab = label_rtx (node->code_label);
5692 case_node_ptr np = node;
5694 /* Try to group the successors of NODE with NODE. */
5695 while (((np = np->right) != 0)
5696 /* Do they jump to the same place? */
5697 && same_case_target_p (label_rtx (np->code_label), lab)
5698 /* Are their ranges consecutive? */
5699 && tree_int_cst_equal (np->low,
5700 fold (build (PLUS_EXPR,
5701 TREE_TYPE (node->high),
5702 node->high,
5703 integer_one_node)))
5704 /* An overflow is not consecutive. */
5705 && tree_int_cst_lt (node->high,
5706 fold (build (PLUS_EXPR,
5707 TREE_TYPE (node->high),
5708 node->high,
5709 integer_one_node))))
5711 node->high = np->high;
5713 /* NP is the first node after NODE which can't be grouped with it.
5714 Delete the nodes in between, and move on to that node. */
5715 node->right = np;
5716 node = np;
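/* Illustration (an editor's sketch with hypothetical values): three
   nodes 1, 2 and 3 that branch to one label become the single node
   1..3. If instead node->high were the largest value of its type,
   node->high + 1 would wrap around, the overflow test above would
   fail, and no grouping would be done. */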
5720 /* Take an ordered list of case nodes
5721 and transform them into a near optimal binary tree,
5722 on the assumption that any target code selection value is as
5723 likely as any other.
5725 The transformation is performed by splitting the ordered
5726 list into two equal sections plus a pivot. The parts are
5727 then attached to the pivot as left and right branches. Each
5728 branch is then transformed recursively. */
5730 static void
5731 balance_case_nodes (head, parent)
5732 case_node_ptr *head;
5733 case_node_ptr parent;
5735 case_node_ptr np;
5737 np = *head;
5738 if (np)
5740 int cost = 0;
5741 int i = 0;
5742 int ranges = 0;
5743 case_node_ptr *npp;
5744 case_node_ptr left;
5746 /* Count the number of entries on branch. Also count the ranges. */
5748 while (np)
5750 if (!tree_int_cst_equal (np->low, np->high))
5752 ranges++;
5753 if (use_cost_table)
5754 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5757 if (use_cost_table)
5758 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5760 i++;
5761 np = np->right;
5764 if (i > 2)
5766 /* Split this list if it is long enough for that to help. */
5767 npp = head;
5768 left = *npp;
5769 if (use_cost_table)
5771 /* Find the place in the list that bisects the list's total cost;
5772 here I is set to half the total cost. */
5773 int n_moved = 0;
5774 i = (cost + 1) / 2;
5775 while (1)
5777 /* Skip nodes while their cost does not reach that amount. */
5778 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5779 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5780 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5781 if (i <= 0)
5782 break;
5783 npp = &(*npp)->right;
5784 n_moved += 1;
5786 if (n_moved == 0)
5788 /* Leave this branch lopsided, but optimize left-hand
5789 side and fill in `parent' fields for right-hand side. */
5790 np = *head;
5791 np->parent = parent;
5792 balance_case_nodes (&np->left, np);
5793 for (; np->right; np = np->right)
5794 np->right->parent = np;
5795 return;
5798 /* If there are just three nodes, split at the middle one. */
5799 else if (i == 3)
5800 npp = &(*npp)->right;
5801 else
5803 /* Find the place in the list that bisects the list's total cost,
5804 where ranges count as 2;
5805 here I is set to half the total cost. */
5806 i = (i + ranges + 1) / 2;
5807 while (1)
5809 /* Skip nodes while their cost does not reach that amount. */
5810 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5811 i--;
5812 i--;
5813 if (i <= 0)
5814 break;
5815 npp = &(*npp)->right;
5818 *head = np = *npp;
5819 *npp = 0;
5820 np->parent = parent;
5821 np->left = left;
5823 /* Optimize each of the two split parts. */
5824 balance_case_nodes (&np->left, np);
5825 balance_case_nodes (&np->right, np);
5827 else
5829 /* Else leave this branch as one level,
5830 but fill in `parent' fields. */
5831 np = *head;
5832 np->parent = parent;
5833 for (; np->right; np = np->right)
5834 np->right->parent = np;
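/* Illustration (an editor's sketch, assuming no cost table): for seven
   equally likely single-value cases, i is 7 and ranges is 0, so the
   split point is (7 + 0 + 1) / 2 = 4; the fourth node becomes the
   pivot, three nodes hang off each side of it, and the two halves are
   then balanced recursively. */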
5839 /* Search the parent sections of the case node tree
5840 to see if a test for the lower bound of NODE would be redundant.
5841 INDEX_TYPE is the type of the index expression.
5843 The instructions to generate the case decision tree are
5844 output in the same order as nodes are processed, so it is
5845 known that if a parent node checks the range of the current
5846 node minus one, then the current node is bounded at its lower
5847 span. Thus the test would be redundant. */
5849 static int
5850 node_has_low_bound (node, index_type)
5851 case_node_ptr node;
5852 tree index_type;
5854 tree low_minus_one;
5855 case_node_ptr pnode;
5857 /* If the lower bound of this node is the lowest value in the index type,
5858 we need not test it. */
5860 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5861 return 1;
5863 /* If this node has a left branch, the value at the left must be less
5864 than that at this node, so it cannot be bounded at the bottom and
5865 we need not bother testing any further. */
5867 if (node->left)
5868 return 0;
5870 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5871 node->low, integer_one_node));
5873 /* If the subtraction above overflowed, we can't verify anything.
5874 Otherwise, look for a parent that tests our value - 1. */
5876 if (! tree_int_cst_lt (low_minus_one, node->low))
5877 return 0;
5879 for (pnode = node->parent; pnode; pnode = pnode->parent)
5880 if (tree_int_cst_equal (low_minus_one, pnode->high))
5881 return 1;
5883 return 0;
5886 /* Search the parent sections of the case node tree
5887 to see if a test for the upper bound of NODE would be redundant.
5888 INDEX_TYPE is the type of the index expression.
5890 The instructions to generate the case decision tree are
5891 output in the same order as nodes are processed, so it is
5892 known that if a parent node checks the range of the current
5893 node plus one, then the current node is bounded at its upper
5894 span. Thus the test would be redundant. */
5896 static int
5897 node_has_high_bound (node, index_type)
5898 case_node_ptr node;
5899 tree index_type;
5901 tree high_plus_one;
5902 case_node_ptr pnode;
5904 /* If there is no upper bound, obviously no test is needed. */
5906 if (TYPE_MAX_VALUE (index_type) == NULL)
5907 return 1;
5909 /* If the upper bound of this node is the highest value in the type
5910 of the index expression, we need not test against it. */
5912 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5913 return 1;
5915 /* If this node has a right branch, the value at the right must be greater
5916 than that at this node, so it cannot be bounded at the top and
5917 we need not bother testing any further. */
5919 if (node->right)
5920 return 0;
5922 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5923 node->high, integer_one_node));
5925 /* If the addition above overflowed, we can't verify anything.
5926 Otherwise, look for a parent that tests our value + 1. */
5928 if (! tree_int_cst_lt (node->high, high_plus_one))
5929 return 0;
5931 for (pnode = node->parent; pnode; pnode = pnode->parent)
5932 if (tree_int_cst_equal (high_plus_one, pnode->low))
5933 return 1;
5935 return 0;
5938 /* Search the parent sections of the
5939 case node tree to see if both tests for the upper and lower
5940 bounds of NODE would be redundant. */
5942 static int
5943 node_is_bounded (node, index_type)
5944 case_node_ptr node;
5945 tree index_type;
5947 return (node_has_low_bound (node, index_type)
5948 && node_has_high_bound (node, index_type));
5951 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5953 static void
5954 emit_jump_if_reachable (label)
5955 rtx label;
5957 if (GET_CODE (get_last_insn ()) != BARRIER)
5958 emit_jump (label);
5961 /* Emit step-by-step code to select a case for the value of INDEX.
5962 The thus generated decision tree follows the form of the
5963 case-node binary tree NODE, whose nodes represent test conditions.
5964 INDEX_TYPE is the type of the index of the switch.
5966 Care is taken to prune redundant tests from the decision tree
5967 by detecting any boundary conditions already checked by
5968 emitted rtx. (See node_has_high_bound, node_has_low_bound
5969 and node_is_bounded, above.)
5971 Where the test conditions can be shown to be redundant we emit
5972 an unconditional jump to the target code. As a further
5973 optimization, the subordinates of a tree node are examined to
5974 check for bounded nodes. In that case the conditional and/or
5975 unconditional jumps emitted by the boundary check for the
5976 current node are arranged to target the subordinate's associated
5977 code directly for out-of-bound conditions on the current node.
5979 We can assume that when control reaches the code generated here,
5980 the index value has already been compared with the parents
5981 of this node, and determined to be on the same side of each parent
5982 as this node is. Thus, if this node tests for the value 51,
5983 and a parent tested for 52, we don't need to consider
5984 the possibility of a value greater than 51. If another parent
5985 tests for the value 50, then this node need not test anything. */
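/* Illustration (an editor's sketch of the emitted control flow, with
   hypothetical labels): for the three single-value cases 10, 20 and 30
   balanced with 20 at the root, the code below emits roughly

	if (index == 20) goto L20;
	if (index > 20) goto test_right;
	if (index == 10) goto L10;
	goto Ldefault;
      test_right:
	if (index == 30) goto L30;
	goto Ldefault;
   */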
5987 static void
5988 emit_case_nodes (index, node, default_label, index_type)
5989 rtx index;
5990 case_node_ptr node;
5991 rtx default_label;
5992 tree index_type;
5994 /* If INDEX has an unsigned type, we must make unsigned branches. */
5995 int unsignedp = TREE_UNSIGNED (index_type);
5996 enum machine_mode mode = GET_MODE (index);
5997 enum machine_mode imode = TYPE_MODE (index_type);
5999 /* See if our parents have already tested everything for us.
6000 If they have, emit an unconditional jump for this node. */
6001 if (node_is_bounded (node, index_type))
6002 emit_jump (label_rtx (node->code_label));
6004 else if (tree_int_cst_equal (node->low, node->high))
6006 /* Node is single valued. First see if the index expression matches
6007 this node and then check our children, if any. */
6009 do_jump_if_equal (index,
6010 convert_modes (mode, imode,
6011 expand_expr (node->low, NULL_RTX,
6012 VOIDmode, 0),
6013 unsignedp),
6014 label_rtx (node->code_label), unsignedp);
6016 if (node->right != 0 && node->left != 0)
6018 /* This node has children on both sides.
6019 Dispatch to one side or the other
6020 by comparing the index value with this node's value.
6021 If one subtree is bounded, check that one first,
6022 so we can avoid real branches in the tree. */
6024 if (node_is_bounded (node->right, index_type))
6026 emit_cmp_and_jump_insns (index,
6027 convert_modes
6028 (mode, imode,
6029 expand_expr (node->high, NULL_RTX,
6030 VOIDmode, 0),
6031 unsignedp),
6032 GT, NULL_RTX, mode, unsignedp,
6033 label_rtx (node->right->code_label));
6034 emit_case_nodes (index, node->left, default_label, index_type);
6037 else if (node_is_bounded (node->left, index_type))
6039 emit_cmp_and_jump_insns (index,
6040 convert_modes
6041 (mode, imode,
6042 expand_expr (node->high, NULL_RTX,
6043 VOIDmode, 0),
6044 unsignedp),
6045 LT, NULL_RTX, mode, unsignedp,
6046 label_rtx (node->left->code_label));
6047 emit_case_nodes (index, node->right, default_label, index_type);
6050 else
6052 /* Neither node is bounded. First distinguish the two sides;
6053 then emit the code for one side at a time. */
6055 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6057 /* See if the value is on the right. */
6058 emit_cmp_and_jump_insns (index,
6059 convert_modes
6060 (mode, imode,
6061 expand_expr (node->high, NULL_RTX,
6062 VOIDmode, 0),
6063 unsignedp),
6064 GT, NULL_RTX, mode, unsignedp,
6065 label_rtx (test_label));
6067 /* Value must be on the left.
6068 Handle the left-hand subtree. */
6069 emit_case_nodes (index, node->left, default_label, index_type);
6070 /* If left-hand subtree does nothing,
6071 go to default. */
6072 emit_jump_if_reachable (default_label);
6074 /* Code branches here for the right-hand subtree. */
6075 expand_label (test_label);
6076 emit_case_nodes (index, node->right, default_label, index_type);
6080 else if (node->right != 0 && node->left == 0)
6082 /* Here we have a right child but no left, so we issue a conditional
6083 branch to default and process the right child.
6085 Omit the conditional branch to default if it would avoid only one
6086 right child; it costs too much space to save so little time. */
6088 if (node->right->right || node->right->left
6089 || !tree_int_cst_equal (node->right->low, node->right->high))
6091 if (!node_has_low_bound (node, index_type))
6093 emit_cmp_and_jump_insns (index,
6094 convert_modes
6095 (mode, imode,
6096 expand_expr (node->high, NULL_RTX,
6097 VOIDmode, 0),
6098 unsignedp),
6099 LT, NULL_RTX, mode, unsignedp,
6100 default_label);
6103 emit_case_nodes (index, node->right, default_label, index_type);
6105 else
6106 /* We cannot process node->right normally
6107 since we haven't ruled out the numbers less than
6108 this node's value. So handle node->right explicitly. */
6109 do_jump_if_equal (index,
6110 convert_modes
6111 (mode, imode,
6112 expand_expr (node->right->low, NULL_RTX,
6113 VOIDmode, 0),
6114 unsignedp),
6115 label_rtx (node->right->code_label), unsignedp);
6118 else if (node->right == 0 && node->left != 0)
6120 /* Just one subtree, on the left. */
6121 if (node->left->left || node->left->right
6122 || !tree_int_cst_equal (node->left->low, node->left->high))
6124 if (!node_has_high_bound (node, index_type))
6126 emit_cmp_and_jump_insns (index,
6127 convert_modes
6128 (mode, imode,
6129 expand_expr (node->high, NULL_RTX,
6130 VOIDmode, 0),
6131 unsignedp),
6132 GT, NULL_RTX, mode, unsignedp,
6133 default_label);
6136 emit_case_nodes (index, node->left, default_label, index_type);
6138 else
6139 /* We cannot process node->left normally
6140 since we haven't ruled out the numbers greater than
6141 this node's value. So handle node->left explicitly. */
6142 do_jump_if_equal (index,
6143 convert_modes
6144 (mode, imode,
6145 expand_expr (node->left->low, NULL_RTX,
6146 VOIDmode, 0),
6147 unsignedp),
6148 label_rtx (node->left->code_label), unsignedp);
6151 else
6153 /* Node is a range. These cases are very similar to those for a single
6154 value, except that we do not start by testing whether this node
6155 is the one to branch to. */
6157 if (node->right != 0 && node->left != 0)
6159 /* Node has subtrees on both sides.
6160 If the right-hand subtree is bounded,
6161 test for it first, since we can go straight there.
6162 Otherwise, we need to make a branch in the control structure,
6163 then handle the two subtrees. */
6164 tree test_label = 0;
6166 if (node_is_bounded (node->right, index_type))
6167 /* Right hand node is fully bounded so we can eliminate any
6168 testing and branch directly to the target code. */
6169 emit_cmp_and_jump_insns (index,
6170 convert_modes
6171 (mode, imode,
6172 expand_expr (node->high, NULL_RTX,
6173 VOIDmode, 0),
6174 unsignedp),
6175 GT, NULL_RTX, mode, unsignedp,
6176 label_rtx (node->right->code_label));
6177 else
6179 /* Right hand node requires testing.
6180 Branch to a label where we will handle it later. */
6182 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6183 emit_cmp_and_jump_insns (index,
6184 convert_modes
6185 (mode, imode,
6186 expand_expr (node->high, NULL_RTX,
6187 VOIDmode, 0),
6188 unsignedp),
6189 GT, NULL_RTX, mode, unsignedp,
6190 label_rtx (test_label));
6193 /* Value belongs to this node or to the left-hand subtree. */
6195 emit_cmp_and_jump_insns (index,
6196 convert_modes
6197 (mode, imode,
6198 expand_expr (node->low, NULL_RTX,
6199 VOIDmode, 0),
6200 unsignedp),
6201 GE, NULL_RTX, mode, unsignedp,
6202 label_rtx (node->code_label));
6204 /* Handle the left-hand subtree. */
6205 emit_case_nodes (index, node->left, default_label, index_type);
6207 /* If right node had to be handled later, do that now. */
6209 if (test_label)
6211 /* If the left-hand subtree fell through,
6212 don't let it fall into the right-hand subtree. */
6213 emit_jump_if_reachable (default_label);
6215 expand_label (test_label);
6216 emit_case_nodes (index, node->right, default_label, index_type);
6220 else if (node->right != 0 && node->left == 0)
6222 /* Deal with values to the left of this node,
6223 if they are possible. */
6224 if (!node_has_low_bound (node, index_type))
6226 emit_cmp_and_jump_insns (index,
6227 convert_modes
6228 (mode, imode,
6229 expand_expr (node->low, NULL_RTX,
6230 VOIDmode, 0),
6231 unsignedp),
6232 LT, NULL_RTX, mode, unsignedp,
6233 default_label);
6236 /* Value belongs to this node or to the right-hand subtree. */
6238 emit_cmp_and_jump_insns (index,
6239 convert_modes
6240 (mode, imode,
6241 expand_expr (node->high, NULL_RTX,
6242 VOIDmode, 0),
6243 unsignedp),
6244 LE, NULL_RTX, mode, unsignedp,
6245 label_rtx (node->code_label));
6247 emit_case_nodes (index, node->right, default_label, index_type);
6250 else if (node->right == 0 && node->left != 0)
6252 /* Deal with values to the right of this node,
6253 if they are possible. */
6254 if (!node_has_high_bound (node, index_type))
6256 emit_cmp_and_jump_insns (index,
6257 convert_modes
6258 (mode, imode,
6259 expand_expr (node->high, NULL_RTX,
6260 VOIDmode, 0),
6261 unsignedp),
6262 GT, NULL_RTX, mode, unsignedp,
6263 default_label);
6266 /* Value belongs to this node or to the left-hand subtree. */
6268 emit_cmp_and_jump_insns (index,
6269 convert_modes
6270 (mode, imode,
6271 expand_expr (node->low, NULL_RTX,
6272 VOIDmode, 0),
6273 unsignedp),
6274 GE, NULL_RTX, mode, unsignedp,
6275 label_rtx (node->code_label));
6277 emit_case_nodes (index, node->left, default_label, index_type);
6280 else
6282 /* Node has no children so we check low and high bounds to remove
6283 redundant tests. Only one of the bounds can exist,
6284 since otherwise this node is bounded--a case tested already. */
6285 int high_bound = node_has_high_bound (node, index_type);
6286 int low_bound = node_has_low_bound (node, index_type);
6288 if (!high_bound && low_bound)
6290 emit_cmp_and_jump_insns (index,
6291 convert_modes
6292 (mode, imode,
6293 expand_expr (node->high, NULL_RTX,
6294 VOIDmode, 0),
6295 unsignedp),
6296 GT, NULL_RTX, mode, unsignedp,
6297 default_label);
6300 else if (!low_bound && high_bound)
6302 emit_cmp_and_jump_insns (index,
6303 convert_modes
6304 (mode, imode,
6305 expand_expr (node->low, NULL_RTX,
6306 VOIDmode, 0),
6307 unsignedp),
6308 LT, NULL_RTX, mode, unsignedp,
6309 default_label);
6311 else if (!low_bound && !high_bound)
6313 /* Widen LOW and HIGH to the same width as INDEX. */
6314 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
6315 tree low = build1 (CONVERT_EXPR, type, node->low);
6316 tree high = build1 (CONVERT_EXPR, type, node->high);
6317 rtx low_rtx, new_index, new_bound;
6319 /* Instead of doing two branches, emit one unsigned branch for
6320 (index-low) > (high-low). */
6321 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6322 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6323 NULL_RTX, unsignedp,
6324 OPTAB_WIDEN);
6325 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6326 high, low)),
6327 NULL_RTX, mode, 0);
6329 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6330 mode, 1, default_label);
6333 emit_jump (label_rtx (node->code_label));
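/* Illustration (an editor's sketch with a hypothetical range): for a
   leaf node covering case 3 ... 7 with neither bound proven redundant,
   the comparison and jump just above amount to

	if ((unsigned) (index - 3) > 7 - 3)
	  goto Ldefault;
	goto L3_7;
   */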
6338 #include "gt-stmt.h"