/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
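
/* As a concrete sketch of that protocol, expanding the C statement
   `if (x) y = 1;' amounts to a call sequence along these lines
   (expand_start_cond and expand_end_cond are defined further down in
   this file; the middle step stands for whatever the then-clause
   expands to):

       expand_start_cond (cond_expr, 0);
       ... expand RTL for the then-clause ...
       expand_end_cond ();  */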
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
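
/* For illustration: a switch whose labels are 1, 2, 3 and 4 spans a
   dense range and would normally be emitted as a branch table, while
   one whose labels are 1, 100 and 10000 is sparse, so it would instead
   become a balanced tree of compare-and-jump insns: test against 100
   at the root, then against 1 or 10000 in the appropriate subtree.  */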
struct case_node GTY(())
{
  struct case_node *left;     /* Left son in binary tree */
  struct case_node *right;    /* Right son in binary tree; also node chain */
  struct case_node *parent;   /* Parent of node in binary tree */
  tree low;                   /* Lowest index value for this label */
  tree high;                  /* Highest index value for this label */
  tree code_label;            /* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
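
/* Thus COST_TABLE (-1) refers to cost_table_[0], and the full index
   range -1 .. 127 maps onto the 129 entries of cost_table_.  */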
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
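
/* For example, while expanding

       while (a)           -- pushes a LOOP_NESTING entry
         {                 -- pushes a BLOCK_NESTING entry
           if (b)          -- pushes a COND_NESTING entry
             break;
         }

   nesting_stack lists the entries innermost-first (cond, block, loop),
   and the `break' is expanded by consulting loop_stack for the
   innermost loop's exit label.  */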
struct nesting GTY(())
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  enum nesting_desc {
    COND_NESTING,
    LOOP_NESTING,
    BLOCK_NESTING,
    CASE_NESTING
  } desc;
  union nesting_u
    {
      /* For conds (if-then and if-then-else statements).  */
      struct nesting_cond
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } GTY ((tag ("COND_NESTING"))) cond;
      /* For loops.  */
      struct nesting_loop
        {
          /* Label at the top of the loop; place to loop back to.  */
          rtx start_label;
          /* Label at the end of the whole construct.  */
          rtx end_label;
          /* Label for `continue' statement to jump to;
             this is in front of the stepper of the loop.  */
          rtx continue_label;
        } GTY ((tag ("LOOP_NESTING"))) loop;
      /* For variable binding contours.  */
      struct nesting_block
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0,
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Number of function calls seen, as of start of this block.  */
          int n_function_calls;
          /* Nonzero if this is associated with an EH region.  */
          int exception_region;
          /* The saved target_temp_slot_level from our outer block.
             We may reset target_temp_slot_level to be the level of
             this block; if that is done, target_temp_slot_level
             reverts to the saved target_temp_slot_level at the very
             end of the block.  */
          int block_target_temp_slot_level;
          /* True if we are currently emitting insns in an area of
             output code that is controlled by a conditional
             expression.  This is used by the cleanup handling code to
             generate conditional cleanup actions.  */
          int conditional_code;
          /* A place to move the start of the exception region for any
             of the conditional cleanups, must be at the end or after
             the start of the last unconditional cleanup, and before any
             conditional branch points.  */
          rtx last_unconditional_cleanup;
        } GTY ((tag ("BLOCK_NESTING"))) block;
      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */
      struct nesting_case
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* A list of case labels; it is first built as an AVL tree.
             During expand_end_case, this is converted to a list, and may be
             rearranged into a nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Name of this kind of statement, for warnings.  */
          const char *printname;
          /* Used to save no_line_numbers till we see the first case label.
             We set this to -1 when we see the first case label in this
             case statement.  */
          int line_number_status;
        } GTY ((tag ("CASE_NESTING"))) case_stmt;
    } GTY ((desc ("%1.desc"))) data;
};
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) ggc_alloc (sizeof (struct nesting))
/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)                                  \
do { struct nesting *target = STACK;                     \
     struct nesting *this;                               \
     do { this = nesting_stack;                          \
          if (loop_stack == this)                        \
            loop_stack = loop_stack->next;               \
          if (cond_stack == this)                        \
            cond_stack = cond_stack->next;               \
          if (block_stack == this)                       \
            block_stack = block_stack->next;             \
          if (stack_block_stack == this)                 \
            stack_block_stack = stack_block_stack->next; \
          if (case_stack == this)                        \
            case_stack = case_stack->next;               \
          nesting_depth = nesting_stack->depth - 1;      \
          nesting_stack = this->all; }                   \
   while (this != target); } while (0)
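
/* For instance, POPSTACK (loop_stack) unwinds nesting_stack entry by
   entry, unlinking each entry from whichever per-type stacks it heads,
   until the entry that was on top of loop_stack has been popped.  */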
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
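
/* The classic case needing a fixup:

       {
         char buf[n];   -- this contour has a stack level to restore
         goto out;      -- `out' not yet defined: record a fixup
       }
     out: ;             -- once defined, the fixup supplies the
                           stack-restore insns before the jump.  */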
struct goto_fixup GTY(())
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain GTY(())
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
struct stmt_status GTY(())
{
  /* Chain of all pending binding contours.  */
  struct nesting * x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting * x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting * x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting * x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting * x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting * x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Filename and line number of last line-number note,
     whether we actually emitted it or not.  */
  const char *x_emit_filename;
  int x_emit_lineno;

  struct goto_fixup *x_goto_fixup_chain;
};
#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_filename (cfun->stmt->x_emit_filename)
#define emit_lineno (cfun->stmt->x_emit_lineno)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;
static int n_occurrences                PARAMS ((int, const char *));
static bool parse_input_constraint      PARAMS ((const char **, int, int, int,
                                                 int, const char * const *,
                                                 bool *, bool *));
static bool decl_conflicts_with_clobbers_p PARAMS ((tree, const HARD_REG_SET));
static void expand_goto_internal        PARAMS ((tree, rtx, rtx));
static int expand_fixup                 PARAMS ((tree, rtx, rtx));
static rtx expand_nl_handler_label      PARAMS ((rtx, rtx));
static void expand_nl_goto_receiver     PARAMS ((void));
static void expand_nl_goto_receivers    PARAMS ((struct nesting *));
static void fixup_gotos                 PARAMS ((struct nesting *, rtx, tree,
                                                 rtx, int));
static bool check_operand_nalternatives PARAMS ((tree, tree));
static bool check_unique_operand_names  PARAMS ((tree, tree));
static tree resolve_operand_names       PARAMS ((tree, tree, tree,
                                                 const char **));
static char *resolve_operand_name_1     PARAMS ((char *, tree, tree));
static void expand_null_return_1        PARAMS ((rtx));
static enum br_predictor return_prediction PARAMS ((rtx));
static void expand_value_return         PARAMS ((rtx));
static int tail_recursion_args          PARAMS ((tree, tree));
static void expand_cleanups             PARAMS ((tree, tree, int, int));
static void check_seenlabel             PARAMS ((void));
static void do_jump_if_equal            PARAMS ((rtx, rtx, rtx, int));
static int estimate_case_costs          PARAMS ((case_node_ptr));
static void group_case_nodes            PARAMS ((case_node_ptr));
static void balance_case_nodes          PARAMS ((case_node_ptr *,
                                                 case_node_ptr));
static int node_has_low_bound           PARAMS ((case_node_ptr, tree));
static int node_has_high_bound          PARAMS ((case_node_ptr, tree));
static int node_is_bounded              PARAMS ((case_node_ptr, tree));
static void emit_jump_if_reachable      PARAMS ((rtx));
static void emit_case_nodes             PARAMS ((rtx, case_node_ptr, rtx, tree));
static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}
void
init_stmt_for_function ()
{
  cfun->stmt = ((struct stmt_status *)ggc_alloc (sizeof (struct stmt_status)));

  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  current_block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  clear_last_expr ();
}
/* Record the current file and line.  Called from emit_line_note.  */
void
set_file_and_line_for_stmt (file, line)
     const char *file;
     int line;
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    {
      emit_filename = file;
      emit_lineno = line;
    }
}
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
          || (GET_CODE (last_insn) == NOTE
              && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (!DECL_RTL_SET_P (label))
    SET_DECL_RTL (label, gen_label_rtx ());

  return DECL_RTL (label);
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (x) != Pmode)
    x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
                       &nonlocal_goto_stack_level,
                       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area, insn;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
           link = TREE_CHAIN (link))
        handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
         case the virtual stack vars register gets instantiated differently
         in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
         which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
                                               virtual_stack_vars_rtx,
                                               static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
        save_area = replace_rtx (copy_rtx (save_area),
                                 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
                                      save_area, label_ref));
      else
#endif
        {
          /* Restore frame pointer for containing function.
             This sets the actual hard register used for the frame pointer
             to the location of the function's incoming static chain info.
             The non-local goto handler will then adjust it to contain the
             proper value and reload the argument pointer, if needed.  */
          emit_move_insn (hard_frame_pointer_rtx, static_chain);
          emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

          /* USE of hard_frame_pointer_rtx added for consistency;
             not clear if really needed.  */
          emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
          emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
          emit_indirect_jump (handler_slot);
        }

      /* Search backwards to the jump insn and mark it as a
         non-local goto.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
        {
          if (GET_CODE (insn) == JUMP_INSN)
            {
              REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
                                                  const0_rtx, REG_NOTES (insn));
              break;
            }
          else if (GET_CODE (insn) == CALL_INSN)
            break;
        }
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing insn-uids.)
         Then restore the outermost stack level within that block,
         and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
        {
          if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
            break;
          if (block->data.block.stack_level != 0)
            stack_level = block->data.block.stack_level;
          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
            }
        }

      if (stack_level)
        {
          /* Ensure stack adjust isn't done by emit_jump, as this
             would clobber the stack pointer.  This one should be
             deleted as dead by flow.  */
          clear_pending_stack_adjust ();
          do_pending_stack_adjust ();

          /* Don't do this adjust if it's to the end label and this function
             is to return with a depressed stack pointer.  */
          if (label == return_label
              && (((TREE_CODE (TREE_TYPE (current_function_decl))
                   == FUNCTION_TYPE)
                   && (TYPE_RETURNS_STACK_DEPRESSED
                       (TREE_TYPE (current_function_decl))))))
            ;
          else
            emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
        }

      if (body != 0 && DECL_TOO_LATE (body))
        error ("jump to `%s' invalidly jumps into binding contour",
               IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
         of at least one goto that has no fixup.  */
      if (body != 0)
        TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
          || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
           && (rtl_label == loop_stack->data.loop.start_label
               || rtl_label == loop_stack->data.loop.end_label
               || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
         If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
        next_block = next_block->all;

      if (next_block)
        return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
         which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
        if (block == next_block)
          next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
        || block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
        = (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
         after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
         doing the stack adjust here isn't useful.  Doing it with our
         settings at that location isn't useful either.  Let's hope
         someone does it!  */
      if (last_insn == 0)
        do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
         NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
         this point.  The notes will encapsulate any and all fixup
         code which we might later insert at this point in the insn
         stream.  Also, the BLOCK node will be the parent (i.e. the
         `SUPERBLOCK') of any other BLOCK nodes which we might create
         later on when we are expanding the fixup code.

         Note that optimization passes (including expand_end_loop)
         might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
         as a placeholder.  */

      {
        rtx original_before_jump
          = last_insn ? last_insn : get_last_insn ();
        rtx start;
        rtx end;
        tree block;

        block = make_node (BLOCK);
        TREE_USED (block) = 1;

        if (!cfun->x_whole_function_mode_p)
          (*lang_hooks.decls.insert_block) (block);
        else
          {
            BLOCK_CHAIN (block)
              = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
            BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
              = block;
          }

        start_sequence ();
        start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
        if (cfun->x_whole_function_mode_p)
          NOTE_BLOCK (start) = block;
        fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
        end = emit_note (NULL, NOTE_INSN_BLOCK_END);
        if (cfun->x_whole_function_mode_p)
          NOTE_BLOCK (end) = block;
        fixup->context = block;
        end_sequence ();
        emit_insn_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
        = ((block->data.block.outer_cleanups
            || block->data.block.cleanups)
           ? tree_cons (NULL_TREE, block->data.block.cleanups,
                        block->data.block.outer_cleanups)
           : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev != 0)
            prev->next = f->next;
        }
      /* Has this fixup's target label been defined?
         If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
        {
          rtx cleanup_insns;

          /* If this fixup jumped into this contour from before the beginning
             of this contour, report an error.   This code used to use
             the first non-label insn after f->target_rtl, but that's
             wrong since such can be added, by things like put_var_into_stack
             and have INSN_UIDs that are out of the range of the block.  */
          /* ??? Bug: this does not detect jumping in through intermediate
             blocks that have stack levels or cleanups.
             It detects only a problem with the innermost block
             around the label.  */
          if (f->target != 0
              && (dont_jump_in || stack_level || cleanup_list)
              && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
              && INSN_UID (first_insn) > INSN_UID (f->before_jump)
              && ! DECL_ERROR_ISSUED (f->target))
            {
              error_with_decl (f->target,
                               "label `%s' used before containing binding contour");
              /* Prevent multiple errors for one label.  */
              DECL_ERROR_ISSUED (f->target) = 1;
            }

          /* We will expand the cleanups into a sequence of their own and
             then later on we will attach this new sequence to the insn
             stream just ahead of the actual jump insn.  */

          start_sequence ();

          /* Temporarily restore the lexical context where we will
             logically be inserting the fixup code.  We do this for the
             sake of getting the debugging information right.  */

          (*lang_hooks.decls.pushlevel) (0);
          (*lang_hooks.decls.set_block) (f->context);

          /* Expand the cleanups for blocks this jump exits.  */
          if (f->cleanup_list_list)
            {
              tree lists;
              for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
                /* Marked elements correspond to blocks that have been closed.
                   Do their cleanups.  */
                if (TREE_ADDRESSABLE (lists)
                    && TREE_VALUE (lists) != 0)
                  {
                    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
                    /* Pop any pushes done in the cleanups,
                       in case function is about to return.  */
                    do_pending_stack_adjust ();
                  }
            }

          /* Restore stack level for the biggest contour that this
             jump jumps out of.  */
          if (f->stack_level
              && ! (f->target_rtl == return_label
                    && ((TREE_CODE (TREE_TYPE (current_function_decl))
                         == FUNCTION_TYPE)
                        && (TYPE_RETURNS_STACK_DEPRESSED
                            (TREE_TYPE (current_function_decl))))))
            emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

          /* Finish up the sequence containing the insns which implement the
             necessary cleanups, and then attach that whole sequence to the
             insn stream just ahead of the actual jump insn.  Attaching it
             at that point insures that any cleanups which are in fact
             implicit C++ object destructions (which must be executed upon
             leaving the block) appear (to the debugger) to be taking place
             in an area of the generated code where the object(s) being
             destructed are still "in scope".  */

          cleanup_insns = get_insns ();
          (*lang_hooks.decls.poplevel) (1, 0, 0);

          end_sequence ();
          emit_insn_after (cleanup_insns, f->before_jump);

          f->before_jump = 0;
        }
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
        && PREV_INSN (f->target_rtl) == 0
        /* Label has still not appeared.  If we are exiting a block with
           a stack level to restore, that started before the fixup,
           mark this stack level as needing restoration
           when the fixup is later finalized.  */
        && thisblock != 0
        /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
           means the label is undefined.  That's erroneous, but possible.  */
        && (thisblock->data.block.block_start_count
            <= f->block_start_count))
      {
        tree lists = f->cleanup_list_list;
        rtx cleanup_insns;

        for (; lists; lists = TREE_CHAIN (lists))
          /* If the following elt. corresponds to our containing block
             then the elt. must be for this block.  */
          if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
            {
              start_sequence ();
              (*lang_hooks.decls.pushlevel) (0);
              (*lang_hooks.decls.set_block) (f->context);
              expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
              cleanup_insns = get_insns ();
              (*lang_hooks.decls.poplevel) (1, 0, 0);
              end_sequence ();
              if (cleanup_insns != 0)
                f->before_jump
                  = emit_insn_after (cleanup_insns, f->before_jump);

              f->cleanup_list_list = TREE_CHAIN (lists);
            }

        if (stack_level)
          f->stack_level = stack_level;
      }
}
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */
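
/* For example, the C statement `asm volatile ("nop");' arrives here
   with STRING the STRING_CST "nop" and VOL nonzero, and is emitted
   as a single insn whose body is a volatile ASM_INPUT.  */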
void
expand_asm (string, vol)
     tree string;
     int vol;
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);

  clear_last_expr ();
}
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */
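
/* For example, the constraint "=rm" yields *ALLOWS_REG and *ALLOWS_MEM
   both true with *IS_INOUT false, while "+r" sets *IS_INOUT and is
   canonicalized in place to "=r".  */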
bool
parse_output_constraint (constraint_p, operand_num, ninputs, noutputs,
                         allows_mem, allows_reg, is_inout)
     const char **constraint_p;
     int operand_num;
     int ninputs;
     int noutputs;
     bool *allows_mem;
     bool *allows_reg;
     bool *is_inout;
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks `='");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
        warning ("output constraint `%c' for operand %d is not at the beginning",
                 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
         it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
        error ("operand constraint contains incorrectly positioned '+' or '='");
        return false;

      case '%':
        if (operand_num + 1 == ninputs + noutputs)
          {
            error ("`%%' constraint used with last operand");
            return false;
          }
        break;

      case 'V':  case 'm':  case 'o':
        *allows_mem = true;
        break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
        break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
        error ("matching constraint not valid in output operand");
        return false;

      case '<':  case '>':
        /* ??? Before flow, auto inc/dec insns are not supposed to exist,
           excepting those that expand_call created.  So match memory
           and hope.  */
        *allows_mem = true;
        break;

      case 'g':  case 'X':
        *allows_reg = true;
        *allows_mem = true;
        break;

      case 'p':  case 'r':
        *allows_reg = true;
        break;

      default:
        if (!ISALPHA (*p))
          break;
        if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
          *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
        else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
          *allows_reg = true;
        else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
          *allows_mem = true;
        else
          {
            /* Otherwise we can't assume anything about the nature of
               the constraint except that it isn't purely registers.
               Treat it like "g" and hope for the best.  */
            *allows_reg = true;
            *allows_mem = true;
          }
#endif
        break;
      }

  return true;
}
/* Similar, but for input constraints.  */
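
/* For example, an input constraint of "r" just sets *ALLOWS_REG, while
   a matching constraint such as "0" refers back to output operand 0
   and, when it is the only alternative, is looked up so the dup
   inherits that operand's real constraint.  */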
static bool
parse_input_constraint (constraint_p, input_num, ninputs, noutputs, ninout,
                        constraints, allows_mem, allows_reg)
     const char **constraint_p;
     int input_num;
     int ninputs;
     int noutputs;
     int ninout;
     const char * const * constraints;
     bool *allows_mem;
     bool *allows_reg;
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
        if (constraint == orig_constraint)
          {
            error ("input operand constraint contains `%c'", constraint[j]);
            return false;
          }
        break;

      case '%':
        if (constraint == orig_constraint
            && input_num + 1 == ninputs - ninout)
          {
            error ("`%%' constraint used with last operand");
            return false;
          }
        break;

      case 'V':  case 'm':  case 'o':
        *allows_mem = true;
        break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
        break;

        /* Whether or not a numeric constraint allows a register is
           decided by the matching constraint, and so there is no need
           to do anything special with them.  We must handle them in
           the default case, so that we don't unnecessarily force
           operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
        {
          char *end;
          unsigned long match;

          match = strtoul (constraint + j, &end, 10);
          if (match >= (unsigned long) noutputs)
            {
              error ("matching constraint references invalid operand number");
              return false;
            }

          /* Try and find the real constraint for this dup.  Only do this
             if the matching constraint is the only alternative.  */
          if (*end == '\0'
              && (j == 0 || (j == 1 && constraint[0] == '%')))
            {
              constraint = constraints[match];
              *constraint_p = constraint;
              c_len = strlen (constraint);
              j = 0;
              /* ??? At the end of the loop, we will skip the first part of
                 the matched constraint.  This assumes not only that the
                 other constraint is an output constraint, but also that
                 the '=' or '+' come first.  */
              break;
            }
          else
            j = end - constraint;
          /* Anticipate increment at end of loop.  */
          j--;
        }
        /* Fall through.  */

      case 'p':  case 'r':
        *allows_reg = true;
        break;

      case 'g':  case 'X':
        *allows_reg = true;
        *allows_mem = true;
        break;

      default:
        if (! ISALPHA (constraint[j]))
          {
            error ("invalid punctuation `%c' in constraint", constraint[j]);
            return false;
          }
        if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
            != NO_REGS)
          *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
        else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
          *allows_reg = true;
        else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
          *allows_mem = true;
        else
          {
            /* Otherwise we can't assume anything about the nature of
               the constraint except that it isn't purely registers.
               Treat it like "g" and hope for the best.  */
            *allows_reg = true;
            *allows_mem = true;
          }
#endif
        break;
      }

  return true;
}
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in DECL.  Emit error and return TRUE for error,
   FALSE for ok.  */
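
/* For example, a variable declared `register int x asm ("r1");' used
   as an asm operand conflicts with a clobber of "r1" in the same asm
   and draws the error below.  */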
static bool
decl_conflicts_with_clobbers_p (decl, clobbered_regs)
     tree decl;
     const HARD_REG_SET clobbered_regs;
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
      && DECL_REGISTER (decl)
      && REG_P (DECL_RTL (decl))
      && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
    {
      rtx reg = DECL_RTL (decl);
      unsigned int regno;

      for (regno = REGNO (reg);
           regno < (REGNO (reg)
                    + HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)));
           regno++)
        if (TEST_HARD_REG_BIT (clobbered_regs, regno))
          {
            error ("asm-specifier for variable `%s' conflicts with asm clobber list",
                   IDENTIFIER_POINTER (DECL_NAME (decl)));

            /* Reset registerness to stop multiple errors emitted for a
               single variable.  */
            DECL_REGISTER (decl) = 0;
            return true;
          }
    }
  return false;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
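
/* For example,

       asm volatile ("mov %1, %0" : "=r" (x) : "r" (y) : "cc");

   arrives here with one element in OUTPUTS (for `x', constraint "=r"),
   one in INPUTS (for `y', constraint "r"), and CLOBBERS holding the
   single STRING_CST "cc".  */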
1464 void
1465 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1466 tree string, outputs, inputs, clobbers;
1467 int vol;
1468 const char *filename;
1469 int line;
1471 rtvec argvec, constraintvec;
1472 rtx body;
1473 int ninputs = list_length (inputs);
1474 int noutputs = list_length (outputs);
1475 int ninout;
1476 int nclobbers;
1477 HARD_REG_SET clobbered_regs;
1478 int clobber_conflict_found = 0;
1479 tree tail;
1480 int i;
1481 /* Vector of RTX's of evaluated output operands. */
1482 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1483 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1484 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1485 enum machine_mode *inout_mode
1486 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1487 const char **constraints
1488 = (const char **) alloca ((noutputs + ninputs) * sizeof (const char *));
1489 int old_generating_concat_p = generating_concat_p;
1491 /* An ASM with no outputs needs to be treated as volatile, for now. */
1492 if (noutputs == 0)
1493 vol = 1;
1495 if (! check_operand_nalternatives (outputs, inputs))
1496 return;
1498 if (! check_unique_operand_names (outputs, inputs))
1499 return;
1501 string = resolve_operand_names (string, outputs, inputs, constraints);
1503 #ifdef MD_ASM_CLOBBERS
1504 /* Sometimes we wish to automatically clobber registers across an asm.
1505 Case in point is when the i386 backend moved from cc0 to a hard reg --
1506 maintaining source-level compatibility means automatically clobbering
1507 the flags register. */
1508 MD_ASM_CLOBBERS (clobbers);
1509 #endif
1511 /* Count the number of meaningful clobbered registers, ignoring what
1512 we would ignore later. */
1513 nclobbers = 0;
1514 CLEAR_HARD_REG_SET (clobbered_regs);
1515 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1517 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1519 i = decode_reg_name (regname);
1520 if (i >= 0 || i == -4)
1521 ++nclobbers;
1522 else if (i == -2)
1523 error ("unknown register name `%s' in `asm'", regname);
1525 /* Mark clobbered registers. */
1526 if (i >= 0)
1528 /* Clobbering the PIC register is an error */
1529 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
1531 error ("PIC register `%s' clobbered in `asm'", regname);
1532 return;
1535 SET_HARD_REG_BIT (clobbered_regs, i);
1539 clear_last_expr ();
1541 /* First pass over inputs and outputs checks validity and sets
1542 mark_addressable if needed. */
1544 ninout = 0;
1545 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1547 tree val = TREE_VALUE (tail);
1548 tree type = TREE_TYPE (val);
1549 const char *constraint;
1550 bool is_inout;
1551 bool allows_reg;
1552 bool allows_mem;
1554 /* If there's an erroneous arg, emit no insn. */
1555 if (type == error_mark_node)
1556 return;
1558 /* Try to parse the output constraint. If that fails, there's
1559 no point in going further. */
1560 constraint = constraints[i];
1561 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1562 &allows_mem, &allows_reg, &is_inout))
1563 return;
1565 if (! allows_reg
1566 && (allows_mem
1567 || is_inout
1568 || (DECL_P (val)
1569 && GET_CODE (DECL_RTL (val)) == REG
1570 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
1571 (*lang_hooks.mark_addressable) (val);
1573 if (is_inout)
1574 ninout++;
1577 ninputs += ninout;
1578 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1580 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1581 return;
1584 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1586 bool allows_reg, allows_mem;
1587 const char *constraint;
1589 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1590 would get VOIDmode and that could cause a crash in reload. */
1591 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1592 return;
1594 constraint = constraints[i + noutputs];
1595 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1596 constraints, &allows_mem, &allows_reg))
1597 return;
1599 if (! allows_reg && allows_mem)
1600 (*lang_hooks.mark_addressable) (TREE_VALUE (tail));
1603 /* Second pass evaluates arguments. */
1605 ninout = 0;
1606 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1608 tree val = TREE_VALUE (tail);
1609 tree type = TREE_TYPE (val);
1610 bool is_inout;
1611 bool allows_reg;
1612 bool allows_mem;
1614 if (!parse_output_constraint (&constraints[i], i, ninputs,
1615 noutputs, &allows_mem, &allows_reg,
1616 &is_inout))
1617 abort ();
1619 /* If an output operand is not a decl or indirect ref and our constraint
1620 allows a register, make a temporary to act as an intermediate.
1621 Make the asm insn write into that, then our caller will copy it to
1622 the real output operand. Likewise for promoted variables. */
1624 generating_concat_p = 0;
1626 real_output_rtx[i] = NULL_RTX;
1627 if ((TREE_CODE (val) == INDIRECT_REF
1628 && allows_mem)
1629 || (DECL_P (val)
1630 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1631 && ! (GET_CODE (DECL_RTL (val)) == REG
1632 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1633 || ! allows_reg
1634 || is_inout)
1636 output_rtx[i] = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1638 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1639 error ("output number %d not directly addressable", i);
1640 if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
1641 || GET_CODE (output_rtx[i]) == CONCAT)
1643 real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1644 output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
1645 if (is_inout)
1646 emit_move_insn (output_rtx[i], real_output_rtx[i]);
1649 else
1651 output_rtx[i] = assign_temp (type, 0, 0, 1);
1652 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1655 generating_concat_p = old_generating_concat_p;
1657 if (is_inout)
1659 inout_mode[ninout] = TYPE_MODE (type);
1660 inout_opnum[ninout++] = i;
1663 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1664 clobber_conflict_found = 1;
1667 /* Make vectors for the expression-rtx, constraint strings,
1668 and named operands. */
1670 argvec = rtvec_alloc (ninputs);
1671 constraintvec = rtvec_alloc (ninputs);
1673 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1674 : GET_MODE (output_rtx[0])),
1675 TREE_STRING_POINTER (string),
1676 empty_string, 0, argvec, constraintvec,
1677 filename, line);
1679 MEM_VOLATILE_P (body) = vol;
1681 /* Eval the inputs and put them into ARGVEC.
1682 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1684 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1686 bool allows_reg, allows_mem;
1687 const char *constraint;
1688 tree val, type;
1689 rtx op;
1691 constraint = constraints[i + noutputs];
1692 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1693 constraints, &allows_mem, &allows_reg))
1694 abort ();
1696 generating_concat_p = 0;
1698 val = TREE_VALUE (tail);
1699 type = TREE_TYPE (val);
1700 op = expand_expr (val, NULL_RTX, VOIDmode, 0);
1702 /* Never pass a CONCAT to an ASM. */
1703 if (GET_CODE (op) == CONCAT)
1704 op = force_reg (GET_MODE (op), op);
1706 if (asm_operand_ok (op, constraint) <= 0)
1708 if (allows_reg)
1709 op = force_reg (TYPE_MODE (type), op);
1710 else if (!allows_mem)
1711 warning ("asm operand %d probably doesn't match constraints",
1712 i + noutputs);
1713 else if (CONSTANT_P (op))
1714 op = force_const_mem (TYPE_MODE (type), op);
1715 else if (GET_CODE (op) == REG
1716 || GET_CODE (op) == SUBREG
1717 || GET_CODE (op) == ADDRESSOF
1718 || GET_CODE (op) == CONCAT)
1720 tree qual_type = build_qualified_type (type,
1721 (TYPE_QUALS (type)
1722 | TYPE_QUAL_CONST));
1723 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1725 emit_move_insn (memloc, op);
1726 op = memloc;
1729 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1731 /* We won't recognize volatile memory as a memory_operand
1732 at this point. Ignore it. */
1734 else if (queued_subexp_p (op))
1736 else
1737 /* ??? Leave this only until we have experience with what
1738 happens in combine and elsewhere when constraints are
1739 not satisfied. */
1740 warning ("asm operand %d probably doesn't match constraints",
1741 i + noutputs);
1744 generating_concat_p = old_generating_concat_p;
1745 ASM_OPERANDS_INPUT (body, i) = op;
1747 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1748 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1750 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1751 clobber_conflict_found = 1;
1754 /* Protect all the operands from the queue now that they have all been
1755 evaluated. */
1757 generating_concat_p = 0;
1759 for (i = 0; i < ninputs - ninout; i++)
1760 ASM_OPERANDS_INPUT (body, i)
1761 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1763 for (i = 0; i < noutputs; i++)
1764 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1766 /* For in-out operands, copy output rtx to input rtx. */
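/* For illustration, a hypothetical example: for

     asm ("incl %0" : "+r" (x));        (x86 syntax)

   the "+r" output is recorded again below as an input whose
   constraint is the digit string "0", i.e. a matching constraint
   that ties the input to output operand 0.  */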
1767 for (i = 0; i < ninout; i++)
1769 int j = inout_opnum[i];
1770 char buffer[16];
1772 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1773 = output_rtx[j];
1775 sprintf (buffer, "%d", j);
1776 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1777 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_alloc_string (buffer, -1));
1780 generating_concat_p = old_generating_concat_p;
1782 /* Now, for each output, construct an rtx
1783 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1784 ARGVEC CONSTRAINTS OPNAMES))
1785 If there is more than one, put them inside a PARALLEL. */
1787 if (noutputs == 1 && nclobbers == 0)
1789 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1790 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1793 else if (noutputs == 0 && nclobbers == 0)
1795 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1796 emit_insn (body);
1799 else
1801 rtx obody = body;
1802 int num = noutputs;
1804 if (num == 0)
1805 num = 1;
1807 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1809 /* For each output operand, store a SET. */
1810 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1812 XVECEXP (body, 0, i)
1813 = gen_rtx_SET (VOIDmode,
1814 output_rtx[i],
1815 gen_rtx_ASM_OPERANDS
1816 (GET_MODE (output_rtx[i]),
1817 TREE_STRING_POINTER (string),
1818 constraints[i], i, argvec, constraintvec,
1819 filename, line));
1821 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1824 /* If there are no outputs (but there are some clobbers)
1825 store the bare ASM_OPERANDS into the PARALLEL. */
1827 if (i == 0)
1828 XVECEXP (body, 0, i++) = obody;
1830 /* Store (clobber REG) for each clobbered register specified. */
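/* For illustration, a hypothetical example: in

     asm volatile ("" : : : "cc", "memory");

   "cc" decodes to -3 and is skipped, "memory" decodes to -4 and
   becomes (clobber (mem:BLK (scratch))), and an ordinary register
   name would yield a QImode (clobber (reg ...)) as below.  */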
1832 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1834 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1835 int j = decode_reg_name (regname);
1836 rtx clobbered_reg;
1838 if (j < 0)
1840 if (j == -3) /* `cc', which is not a register */
1841 continue;
1843 if (j == -4) /* `memory', don't cache memory across asm */
1845 XVECEXP (body, 0, i++)
1846 = gen_rtx_CLOBBER (VOIDmode,
1847 gen_rtx_MEM
1848 (BLKmode,
1849 gen_rtx_SCRATCH (VOIDmode)));
1850 continue;
1853 /* Ignore unknown register, error already signaled. */
1854 continue;
1857 /* Use QImode since that's guaranteed to clobber just one reg. */
1858 clobbered_reg = gen_rtx_REG (QImode, j);
1860 /* Sanity check for any overlap between this clobber and the
1861 input and output operands that has not been handled. Such an
1862 overlap should have been detected and reported above. */
1863 if (!clobber_conflict_found)
1865 int opno;
1867 /* We test the old body (obody) contents to avoid tripping
1868 over the under-construction body. */
1869 for (opno = 0; opno < noutputs; opno++)
1870 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1871 internal_error ("asm clobber conflict with output operand");
1873 for (opno = 0; opno < ninputs - ninout; opno++)
1874 if (reg_overlap_mentioned_p (clobbered_reg,
1875 ASM_OPERANDS_INPUT (obody, opno)))
1876 internal_error ("asm clobber conflict with input operand");
1879 XVECEXP (body, 0, i++)
1880 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1883 emit_insn (body);
1886 /* For any outputs that needed reloading into registers, spill them
1887 back to where they belong. */
1888 for (i = 0; i < noutputs; ++i)
1889 if (real_output_rtx[i])
1890 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1892 free_temp_slots ();
1895 /* A subroutine of expand_asm_operands. Check that all operands have
1896 the same number of alternatives. Return true if so. */
1898 static bool
1899 check_operand_nalternatives (outputs, inputs)
1900 tree outputs, inputs;
1902 if (outputs || inputs)
1904 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1905 int nalternatives
1906 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1907 tree next = inputs;
1909 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1911 error ("too many alternatives in `asm'");
1912 return false;
1915 tmp = outputs;
1916 while (tmp)
1918 const char *constraint
1919 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1921 if (n_occurrences (',', constraint) != nalternatives)
1923 error ("operand constraints for `asm' differ in number of alternatives");
1924 return false;
1927 if (TREE_CHAIN (tmp))
1928 tmp = TREE_CHAIN (tmp);
1929 else
1930 tmp = next, next = 0;
1934 return true;
1937 /* A subroutine of expand_asm_operands. Check that all operand names
1938 are unique. Return true if so. The names are STRING_CST nodes
1939 rather than identifiers here, so we compare their contents with
1940 simple_cst_equal instead of relying on pointer equality. */
1942 static bool
1943 check_unique_operand_names (outputs, inputs)
1944 tree outputs, inputs;
1946 tree i, j;
1948 for (i = outputs; i ; i = TREE_CHAIN (i))
1950 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1951 if (! i_name)
1952 continue;
1954 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1955 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1956 goto failure;
1959 for (i = inputs; i ; i = TREE_CHAIN (i))
1961 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1962 if (! i_name)
1963 continue;
1965 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1966 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1967 goto failure;
1968 for (j = outputs; j ; j = TREE_CHAIN (j))
1969 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1970 goto failure;
1973 return true;
1975 failure:
1976 error ("duplicate asm operand name '%s'",
1977 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1978 return false;
1981 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1982 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1983 STRING and in the constraints to those numbers. */
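/* For illustration, a hypothetical example: given

     asm ("add %[src], %[dst]" : [dst] "=r" (x) : [src] "r" (y));

   outputs are numbered before inputs, so the template is rewritten
   to "add %1, %0", and a "[dst]" appearing in an input constraint
   would likewise become "0".  */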
1985 static tree
1986 resolve_operand_names (string, outputs, inputs, pconstraints)
1987 tree string;
1988 tree outputs, inputs;
1989 const char **pconstraints;
1991 char *buffer = xstrdup (TREE_STRING_POINTER (string));
1992 char *p;
1993 tree t;
1995 /* Assume that we will not need extra space to perform the substitution.
1996 This is because we get to remove '[' and ']', which means we cannot have
1997 a problem until we have more than 999 operands. */
1999 p = buffer;
2000 while ((p = strchr (p, '%')) != NULL)
2002 if (p[1] == '[')
2003 p += 1;
2004 else if (ISALPHA (p[1]) && p[2] == '[')
2005 p += 2;
2006 else
2008 p += 1;
2009 continue;
2012 p = resolve_operand_name_1 (p, outputs, inputs);
2015 string = build_string (strlen (buffer), buffer);
2016 free (buffer);
2018 /* Collect output constraints here because it's convenient.
2019 There should be no named operands here; this is verified
2020 in expand_asm_operands. */
2021 for (t = outputs; t ; t = TREE_CHAIN (t), pconstraints++)
2022 *pconstraints = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2024 /* Substitute [<name>] in input constraint strings. */
2025 for (t = inputs; t ; t = TREE_CHAIN (t), pconstraints++)
2027 const char *c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2028 if (strchr (c, '[') == NULL)
2029 *pconstraints = c;
2030 else
2032 p = buffer = xstrdup (c);
2033 while ((p = strchr (p, '[')) != NULL)
2034 p = resolve_operand_name_1 (p, outputs, inputs);
2036 *pconstraints = ggc_alloc_string (buffer, -1);
2037 free (buffer);
2041 return string;
2044 /* A subroutine of resolve_operand_names. P points to the '[' for a
2045 potential named operand of the form [<name>]. In place, replace
2046 the name and brackets with a number. Return a pointer to the
2047 balance of the string after substitution. */
2049 static char *
2050 resolve_operand_name_1 (p, outputs, inputs)
2051 char *p;
2052 tree outputs, inputs;
2054 char *q;
2055 int op;
2056 tree t;
2057 size_t len;
2059 /* Collect the operand name. */
2060 q = strchr (p, ']');
2061 if (!q)
2063 error ("missing close brace for named operand");
2064 return strchr (p, '\0');
2066 len = q - p - 1;
2068 /* Resolve the name to a number. */
2069 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2071 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2072 if (name)
2074 const char *c = TREE_STRING_POINTER (name);
2075 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2076 goto found;
2079 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2081 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2082 if (name)
2084 const char *c = TREE_STRING_POINTER (name);
2085 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2086 goto found;
2090 *q = '\0';
2091 error ("undefined named operand '%s'", p + 1);
2092 op = 0;
2093 found:
2095 /* Replace the name with the number. Unfortunately, not all libraries
2096 get the return value of sprintf correct, so search for the end of the
2097 generated string by hand. */
2098 sprintf (p, "%d", op);
2099 p = strchr (p, '\0');
2101 /* Verify the no extra buffer space assumption. */
2102 if (p > q)
2103 abort ();
2105 /* Shift the rest of the buffer down to fill the gap. */
2106 memmove (p, q + 1, strlen (q + 1) + 1);
2108 return p;
2111 /* Generate RTL to evaluate the expression EXP
2112 and remember it in case this is the VALUE in a ({... VALUE; }) constr.
2113 Provided just for backward-compatibility. expand_expr_stmt_value()
2114 should be used for new code. */
2116 void
2117 expand_expr_stmt (exp)
2118 tree exp;
2120 expand_expr_stmt_value (exp, -1, 1);
2123 /* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2124 whether to (1) save the value of the expression, (0) discard it or
2125 (-1) use expr_stmts_for_value to tell. The use of -1 is
2126 deprecated, and retained only for backward compatibility. */
2128 void
2129 expand_expr_stmt_value (exp, want_value, maybe_last)
2130 tree exp;
2131 int want_value, maybe_last;
2133 rtx value;
2134 tree type;
2136 if (want_value == -1)
2137 want_value = expr_stmts_for_value != 0;
2139 /* If -W, warn about statements with no side effects,
2140 except for an explicit cast to void (e.g. for assert()), and
2141 except for last statement in ({...}) where they may be useful. */
2142 if (! want_value
2143 && (expr_stmts_for_value == 0 || ! maybe_last)
2144 && exp != error_mark_node)
2146 if (! TREE_SIDE_EFFECTS (exp))
2148 if ((extra_warnings || warn_unused_value)
2149 && !(TREE_CODE (exp) == CONVERT_EXPR
2150 && VOID_TYPE_P (TREE_TYPE (exp))))
2151 warning_with_file_and_line (emit_filename, emit_lineno,
2152 "statement with no effect");
2154 else if (warn_unused_value)
2155 warn_if_unused_value (exp);
2158 /* If EXP is of function type and we are expanding statements for
2159 value, convert it to pointer-to-function. */
2160 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2161 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2163 /* The call to `expand_expr' could cause last_expr_type and
2164 last_expr_value to get reset. Therefore, we set last_expr_value
2165 and last_expr_type *after* calling expand_expr. */
2166 value = expand_expr (exp, want_value ? NULL_RTX : const0_rtx,
2167 VOIDmode, 0);
2168 type = TREE_TYPE (exp);
2170 /* If all we do is reference a volatile value in memory,
2171 copy it to a register to be sure it is actually touched. */
2172 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2174 if (TYPE_MODE (type) == VOIDmode)
2176 else if (TYPE_MODE (type) != BLKmode)
2177 value = copy_to_reg (value);
2178 else
2180 rtx lab = gen_label_rtx ();
2182 /* Compare the value with itself to reference it. */
2183 emit_cmp_and_jump_insns (value, value, EQ,
2184 expand_expr (TYPE_SIZE (type),
2185 NULL_RTX, VOIDmode, 0),
2186 BLKmode, 0, lab);
2187 emit_label (lab);
2191 /* If this expression is part of a ({...}) and is in memory, we may have
2192 to preserve temporaries. */
2193 preserve_temp_slots (value);
2195 /* Free any temporaries used to evaluate this expression. Any temporary
2196 used as a result of this expression will already have been preserved
2197 above. */
2198 free_temp_slots ();
2200 if (want_value)
2202 last_expr_value = value;
2203 last_expr_type = type;
2206 emit_queue ();
2209 /* Warn if EXP contains any computations whose results are not used.
2210 Return 1 if a warning is printed; 0 otherwise. */
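/* For illustration: the statement `x + 1;' draws the warning and
   returns 1, while `(void) (x + 1);' and `(foo (), 0)' (a constant
   second operand) both return 0.  */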
2212 int
2213 warn_if_unused_value (exp)
2214 tree exp;
2216 if (TREE_USED (exp))
2217 return 0;
2219 /* Don't warn about void constructs. This includes casting to void,
2220 void function calls, and statement expressions with a final cast
2221 to void. */
2222 if (VOID_TYPE_P (TREE_TYPE (exp)))
2223 return 0;
2225 switch (TREE_CODE (exp))
2227 case PREINCREMENT_EXPR:
2228 case POSTINCREMENT_EXPR:
2229 case PREDECREMENT_EXPR:
2230 case POSTDECREMENT_EXPR:
2231 case MODIFY_EXPR:
2232 case INIT_EXPR:
2233 case TARGET_EXPR:
2234 case CALL_EXPR:
2235 case METHOD_CALL_EXPR:
2236 case RTL_EXPR:
2237 case TRY_CATCH_EXPR:
2238 case WITH_CLEANUP_EXPR:
2239 case EXIT_EXPR:
2240 return 0;
2242 case BIND_EXPR:
2243 /* For a binding, warn if no side effect within it. */
2244 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2246 case SAVE_EXPR:
2247 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2249 case TRUTH_ORIF_EXPR:
2250 case TRUTH_ANDIF_EXPR:
2251 /* In && or ||, warn if 2nd operand has no side effect. */
2252 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2254 case COMPOUND_EXPR:
2255 if (TREE_NO_UNUSED_WARNING (exp))
2256 return 0;
2257 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2258 return 1;
2259 /* Let people do `(foo (), 0)' without a warning. */
2260 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2261 return 0;
2262 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2264 case NOP_EXPR:
2265 case CONVERT_EXPR:
2266 case NON_LVALUE_EXPR:
2267 /* Don't warn about conversions not explicit in the user's program. */
2268 if (TREE_NO_UNUSED_WARNING (exp))
2269 return 0;
2270 /* Assignment to a cast usually results in a cast of a modify.
2271 Don't complain about that. There can be an arbitrary number of
2272 casts before the modify, so we must loop until we find the first
2273 non-cast expression and then test to see if that is a modify. */
2275 tree tem = TREE_OPERAND (exp, 0);
2277 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2278 tem = TREE_OPERAND (tem, 0);
2280 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2281 || TREE_CODE (tem) == CALL_EXPR)
2282 return 0;
2284 goto maybe_warn;
2286 case INDIRECT_REF:
2287 /* Don't warn about automatic dereferencing of references, since
2288 the user cannot control it. */
2289 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2290 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2291 /* Fall through. */
2293 default:
2294 /* Referencing a volatile value is a side effect, so don't warn. */
2295 if ((DECL_P (exp)
2296 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2297 && TREE_THIS_VOLATILE (exp))
2298 return 0;
2300 /* If this is an expression which has no operands, there is no value
2301 to be unused. There are no such language-independent codes,
2302 but front ends may define such. */
2303 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2304 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2305 return 0;
2307 maybe_warn:
2308 /* If this is an expression with side effects, don't warn. */
2309 if (TREE_SIDE_EFFECTS (exp))
2310 return 0;
2312 warning_with_file_and_line (emit_filename, emit_lineno,
2313 "value computed is not used");
2314 return 1;
2318 /* Clear out the memory of the last expression evaluated. */
2320 void
2321 clear_last_expr ()
2323 last_expr_type = NULL_TREE;
2324 last_expr_value = NULL_RTX;
2327 /* Begin a statement-expression, i.e., a series of statements which
2328 may return a value. Return the RTL_EXPR for this statement expr.
2329 The caller must save that value and pass it to
2330 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2331 in the statement-expression are deallocated at the end of the
2332 expression. */
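/* For illustration, a hypothetical example:

     int y = ({ int t = f (); t + 1; });

   the front end calls expand_start_stmt_expr at the `({', expands
   the enclosed statements, and the value of `t + 1' is what
   expand_end_stmt_expr later hands back via last_expr_value.  */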
2334 tree
2335 expand_start_stmt_expr (has_scope)
2336 int has_scope;
2338 tree t;
2340 /* Make the RTL_EXPR node temporary, not momentary,
2341 so that rtl_expr_chain doesn't become garbage. */
2342 t = make_node (RTL_EXPR);
2343 do_pending_stack_adjust ();
2344 if (has_scope)
2345 start_sequence_for_rtl_expr (t);
2346 else
2347 start_sequence ();
2348 NO_DEFER_POP;
2349 expr_stmts_for_value++;
2350 return t;
2353 /* Restore the previous state at the end of a statement that returns a value.
2354 Returns a tree node representing the statement's value and the
2355 insns to compute the value.
2357 The nodes of that expression have been freed by now, so we cannot use them.
2358 But we don't want to do that anyway; the expression has already been
2359 evaluated and now we just want to use the value. So generate an RTL_EXPR
2360 with the proper type and RTL value.
2362 If the last substatement was not an expression,
2363 return something with type `void'. */
2365 tree
2366 expand_end_stmt_expr (t)
2367 tree t;
2369 OK_DEFER_POP;
2371 if (! last_expr_value || ! last_expr_type)
2373 last_expr_value = const0_rtx;
2374 last_expr_type = void_type_node;
2376 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2377 /* Remove any possible QUEUED. */
2378 last_expr_value = protect_from_queue (last_expr_value, 0);
2380 emit_queue ();
2382 TREE_TYPE (t) = last_expr_type;
2383 RTL_EXPR_RTL (t) = last_expr_value;
2384 RTL_EXPR_SEQUENCE (t) = get_insns ();
2386 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2388 end_sequence ();
2390 /* Don't consider deleting this expr or containing exprs at tree level. */
2391 TREE_SIDE_EFFECTS (t) = 1;
2392 /* Propagate volatility of the actual RTL expr. */
2393 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2395 clear_last_expr ();
2396 expr_stmts_for_value--;
2398 return t;
2401 /* Generate RTL for the start of an if-then. COND is the expression
2402 whose truth should be tested.
2404 If EXITFLAG is nonzero, this conditional is visible to
2405 `exit_something'. */
2407 void
2408 expand_start_cond (cond, exitflag)
2409 tree cond;
2410 int exitflag;
2412 struct nesting *thiscond = ALLOC_NESTING ();
2414 /* Make an entry on cond_stack for the cond we are entering. */
2416 thiscond->desc = COND_NESTING;
2417 thiscond->next = cond_stack;
2418 thiscond->all = nesting_stack;
2419 thiscond->depth = ++nesting_depth;
2420 thiscond->data.cond.next_label = gen_label_rtx ();
2421 /* Before we encounter an `else', we don't need a separate exit label
2422 unless there are supposed to be exit statements
2423 to exit this conditional. */
2424 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2425 thiscond->data.cond.endif_label = thiscond->exit_label;
2426 cond_stack = thiscond;
2427 nesting_stack = thiscond;
2429 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2432 /* Generate RTL between the then-clause and the elseif-clause
2433 of an if-then-elseif-... chain. */
2435 void
2436 expand_start_elseif (cond)
2437 tree cond;
2439 if (cond_stack->data.cond.endif_label == 0)
2440 cond_stack->data.cond.endif_label = gen_label_rtx ();
2441 emit_jump (cond_stack->data.cond.endif_label);
2442 emit_label (cond_stack->data.cond.next_label);
2443 cond_stack->data.cond.next_label = gen_label_rtx ();
2444 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2447 /* Generate RTL between the then-clause and the else-clause
2448 of an if-then-else. */
2450 void
2451 expand_start_else ()
2453 if (cond_stack->data.cond.endif_label == 0)
2454 cond_stack->data.cond.endif_label = gen_label_rtx ();
2456 emit_jump (cond_stack->data.cond.endif_label);
2457 emit_label (cond_stack->data.cond.next_label);
2458 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2461 /* After calling expand_start_else, turn this "else" into an "else if"
2462 by providing another condition. */
2464 void
2465 expand_elseif (cond)
2466 tree cond;
2468 cond_stack->data.cond.next_label = gen_label_rtx ();
2469 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2472 /* Generate RTL for the end of an if-then.
2473 Pop the record for it off of cond_stack. */
2475 void
2476 expand_end_cond ()
2478 struct nesting *thiscond = cond_stack;
2480 do_pending_stack_adjust ();
2481 if (thiscond->data.cond.next_label)
2482 emit_label (thiscond->data.cond.next_label);
2483 if (thiscond->data.cond.endif_label)
2484 emit_label (thiscond->data.cond.endif_label);
2486 POPSTACK (cond_stack);
2487 clear_last_expr ();
2490 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2491 loop should be exited by `exit_something'. This is a loop for which
2492 `expand_continue' will jump to the top of the loop.
2494 Make an entry on loop_stack to record the labels associated with
2495 this loop. */
2497 struct nesting *
2498 expand_start_loop (exit_flag)
2499 int exit_flag;
2501 struct nesting *thisloop = ALLOC_NESTING ();
2503 /* Make an entry on loop_stack for the loop we are entering. */
2505 thisloop->desc = LOOP_NESTING;
2506 thisloop->next = loop_stack;
2507 thisloop->all = nesting_stack;
2508 thisloop->depth = ++nesting_depth;
2509 thisloop->data.loop.start_label = gen_label_rtx ();
2510 thisloop->data.loop.end_label = gen_label_rtx ();
2511 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2512 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2513 loop_stack = thisloop;
2514 nesting_stack = thisloop;
2516 do_pending_stack_adjust ();
2517 emit_queue ();
2518 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2519 emit_label (thisloop->data.loop.start_label);
2521 return thisloop;
2524 /* Like expand_start_loop but for a loop where the continuation point
2525 (for expand_continue_loop) will be specified explicitly. */
2527 struct nesting *
2528 expand_start_loop_continue_elsewhere (exit_flag)
2529 int exit_flag;
2531 struct nesting *thisloop = expand_start_loop (exit_flag);
2532 loop_stack->data.loop.continue_label = gen_label_rtx ();
2533 return thisloop;
2536 /* Begin a null, aka do { } while (0) "loop". But since the contents
2537 of said loop can still contain a break, we must frob the loop nest. */
2539 struct nesting *
2540 expand_start_null_loop ()
2542 struct nesting *thisloop = ALLOC_NESTING ();
2544 /* Make an entry on loop_stack for the loop we are entering. */
2546 thisloop->desc = LOOP_NESTING;
2547 thisloop->next = loop_stack;
2548 thisloop->all = nesting_stack;
2549 thisloop->depth = ++nesting_depth;
2550 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2551 thisloop->data.loop.end_label = gen_label_rtx ();
2552 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2553 thisloop->exit_label = thisloop->data.loop.end_label;
2554 loop_stack = thisloop;
2555 nesting_stack = thisloop;
2557 return thisloop;
2560 /* Specify the continuation point for a loop started with
2561 expand_start_loop_continue_elsewhere.
2562 Use this at the point in the code to which a continue statement
2563 should jump. */
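/* For illustration: a C front end typically expands
   `for (init; cond; inc) body' by starting the loop with
   expand_start_loop_continue_elsewhere, expanding BODY, and calling
   expand_loop_continue_here just before expanding INC, so that
   `continue' jumps to the increment rather than to the top.  */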
2565 void
2566 expand_loop_continue_here ()
2568 do_pending_stack_adjust ();
2569 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2570 emit_label (loop_stack->data.loop.continue_label);
2573 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2574 Pop the block off of loop_stack. */
2576 void
2577 expand_end_loop ()
2579 rtx start_label = loop_stack->data.loop.start_label;
2580 rtx etc_note;
2581 int eh_regions, debug_blocks;
2582 bool empty_test;
2584 /* Mark the continue-point at the top of the loop if none elsewhere. */
2585 if (start_label == loop_stack->data.loop.continue_label)
2586 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2588 do_pending_stack_adjust ();
2590 /* If the loop starts with a loop exit, roll that to the end where
2591 it will optimize together with the jump back.
2593 If the loop presently looks like this (in pseudo-C):
2595 LOOP_BEG
2596 start_label:
2597 if (test) goto end_label;
2598 LOOP_END_TOP_COND
2599 body;
2600 goto start_label;
2601 end_label:
2603 transform it to look like:
2605 LOOP_BEG
2606 goto start_label;
2607 top_label:
2608 body;
2609 start_label:
2610 if (test) goto end_label;
2611 goto top_label;
2612 end_label:
2614 We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
2615 the end of the entry conditional. Without this, our lexical scan
2616 can't tell the difference between an entry conditional and a
2617 body conditional that exits the loop. Mistaking the two means
2618 that we can misplace the NOTE_INSN_LOOP_CONT note, which can
2619 screw up loop unrolling.
2621 Things will be oh so much better when loop optimization is done
2622 off of a proper control flow graph... */
2624 /* Scan insns from the top of the loop looking for the END_TOP_COND note. */
2626 empty_test = true;
2627 eh_regions = debug_blocks = 0;
2628 for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2629 if (GET_CODE (etc_note) == NOTE)
2631 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2632 break;
2634 /* We must not walk into a nested loop. */
2635 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2637 etc_note = NULL_RTX;
2638 break;
2641 /* At the same time, scan for EH region notes, as we don't want
2642 to scrog region nesting. This shouldn't happen, but... */
2643 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2644 eh_regions++;
2645 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2647 if (--eh_regions < 0)
2648 /* We've come to the end of an EH region, but never saw the
2649 beginning of that region. That means that an EH region
2650 begins before the top of the loop, and ends in the middle
2651 of it. The existence of such a situation violates a basic
2652 assumption in this code, since that would imply that even
2653 when EH_REGIONS is zero, we might move code out of an
2654 exception region. */
2655 abort ();
2658 /* Likewise for debug scopes. In this case we'll either (1) move
2659 all of the notes if they are properly nested or (2) leave the
2660 notes alone and only rotate the loop at high optimization
2661 levels when we expect to scrog debug info. */
2662 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2663 debug_blocks++;
2664 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2665 debug_blocks--;
2667 else if (INSN_P (etc_note))
2668 empty_test = false;
2670 if (etc_note
2671 && optimize
2672 && ! empty_test
2673 && eh_regions == 0
2674 && (debug_blocks == 0 || optimize >= 2)
2675 && NEXT_INSN (etc_note) != NULL_RTX
2676 && ! any_condjump_p (get_last_insn ()))
2678 /* We found one. Move everything from START to ETC to the end
2679 of the loop, and add a jump from the top of the loop. */
2680 rtx top_label = gen_label_rtx ();
2681 rtx start_move = start_label;
2683 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2684 then we want to move this note also. */
2685 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2686 && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
2687 start_move = PREV_INSN (start_move);
2689 emit_label_before (top_label, start_move);
2691 /* Actually move the insns. If the debug scopes are nested, we
2692 can move everything at once. Otherwise we have to move them
2693 one by one and squeeze out the block notes. */
2694 if (debug_blocks == 0)
2695 reorder_insns (start_move, etc_note, get_last_insn ());
2696 else
2698 rtx insn, next_insn;
2699 for (insn = start_move; insn; insn = next_insn)
2701 /* Figure out which insn comes after this one. We have
2702 to do this before we move INSN. */
2703 next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
2705 if (GET_CODE (insn) == NOTE
2706 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2707 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2708 continue;
2710 reorder_insns (insn, insn, get_last_insn ());
2714 /* Add the jump from the top of the loop. */
2715 emit_jump_insn_before (gen_jump (start_label), top_label);
2716 emit_barrier_before (top_label);
2717 start_label = top_label;
2720 emit_jump (start_label);
2721 emit_note (NULL, NOTE_INSN_LOOP_END);
2722 emit_label (loop_stack->data.loop.end_label);
2724 POPSTACK (loop_stack);
2726 clear_last_expr ();
2729 /* Finish a null loop, aka do { } while (0). */
2731 void
2732 expand_end_null_loop ()
2734 do_pending_stack_adjust ();
2735 emit_label (loop_stack->data.loop.end_label);
2737 POPSTACK (loop_stack);
2739 clear_last_expr ();
2742 /* Generate a jump to the current loop's continue-point.
2743 This is usually the top of the loop, but may be specified
2744 explicitly elsewhere. If not currently inside a loop,
2745 return 0 and do nothing; caller will print an error message. */
2747 int
2748 expand_continue_loop (whichloop)
2749 struct nesting *whichloop;
2751 /* Emit information for branch prediction. */
2752 rtx note;
2754 note = emit_note (NULL, NOTE_INSN_PREDICTION);
2755 NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
2756 clear_last_expr ();
2757 if (whichloop == 0)
2758 whichloop = loop_stack;
2759 if (whichloop == 0)
2760 return 0;
2761 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2762 NULL_RTX);
2763 return 1;
2766 /* Generate a jump to exit the current loop. If not currently inside a loop,
2767 return 0 and do nothing; caller will print an error message. */
2769 int
2770 expand_exit_loop (whichloop)
2771 struct nesting *whichloop;
2773 clear_last_expr ();
2774 if (whichloop == 0)
2775 whichloop = loop_stack;
2776 if (whichloop == 0)
2777 return 0;
2778 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2779 return 1;
2782 /* Generate a conditional jump to exit the current loop if COND
2783 evaluates to zero. If not currently inside a loop,
2784 return 0 and do nothing; caller will print an error message. */
2786 int
2787 expand_exit_loop_if_false (whichloop, cond)
2788 struct nesting *whichloop;
2789 tree cond;
2791 rtx label;
2792 clear_last_expr ();
2794 if (whichloop == 0)
2795 whichloop = loop_stack;
2796 if (whichloop == 0)
2797 return 0;
2799 if (integer_nonzerop (cond))
2800 return 1;
2801 if (integer_zerop (cond))
2802 return expand_exit_loop (whichloop);
2804 /* Check if we definitely won't need a fixup. */
2805 if (whichloop == nesting_stack)
2807 jumpifnot (cond, whichloop->data.loop.end_label);
2808 return 1;
2811 /* In order to handle fixups, we actually create a conditional jump
2812 around an unconditional branch to exit the loop. If fixups are
2813 necessary, they go before the unconditional branch. */
2815 label = gen_label_rtx ();
2816 jumpif (cond, label);
2817 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2818 NULL_RTX);
2819 emit_label (label);
2821 return 1;
2824 /* Like expand_exit_loop_if_false except also emit a note marking
2825 the end of the conditional. Should only be used immediately
2826 after expand_loop_start. */
2828 int
2829 expand_exit_loop_top_cond (whichloop, cond)
2830 struct nesting *whichloop;
2831 tree cond;
2833 if (! expand_exit_loop_if_false (whichloop, cond))
2834 return 0;
2836 emit_note (NULL, NOTE_INSN_LOOP_END_TOP_COND);
2837 return 1;
2840 /* Return nonzero if we should preserve sub-expressions as separate
2841 pseudos. We never do so if we aren't optimizing. We always do so
2842 if -fexpensive-optimizations is enabled.
2844 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2845 the loop may still be a small one. */
2847 int
2848 preserve_subexpressions_p ()
2850 rtx insn;
2852 if (flag_expensive_optimizations)
2853 return 1;
2855 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2856 return 0;
2858 insn = get_last_insn_anywhere ();
2860 return (insn
2861 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2862 < n_non_fixed_regs * 3));
2866 /* Generate a jump to exit the current loop, conditional, binding contour
2867 or case statement. Not all such constructs are visible to this function,
2868 only those started with EXIT_FLAG nonzero. Individual languages use
2869 the EXIT_FLAG parameter to control which kinds of constructs you can
2870 exit this way.
2872 If not currently inside anything that can be exited,
2873 return 0 and do nothing; caller will print an error message. */
2875 int
2876 expand_exit_something ()
2878 struct nesting *n;
2879 clear_last_expr ();
2880 for (n = nesting_stack; n; n = n->all)
2881 if (n->exit_label != 0)
2883 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2884 return 1;
2887 return 0;
2890 /* Generate RTL to return from the current function, with no value.
2891 (That is, we do not do anything about returning any value.) */
2893 void
2894 expand_null_return ()
2896 rtx last_insn;
2898 last_insn = get_last_insn ();
2900 /* If this function was declared to return a value, but we
2901 didn't, clobber the return registers so that they are not
2902 propagated live to the rest of the function. */
2903 clobber_return_register ();
2905 expand_null_return_1 (last_insn);
2908 /* Try to guess whether the return value indicates an error code. */
2909 static enum br_predictor
2910 return_prediction (val)
2911 rtx val;
2913 /* Different heuristics for pointers and scalars. */
2914 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2916 /* NULL is usually not returned. */
2917 if (val == const0_rtx)
2918 return PRED_NULL_RETURN;
2920 else
2922 /* Negative return values are often used to indicate
2923 errors. */
2924 if (GET_CODE (val) == CONST_INT
2925 && INTVAL (val) < 0)
2926 return PRED_NEGATIVE_RETURN;
2927 /* Constant return values are also usually errors;
2928 zero/one often mean booleans, so exclude them from the
2929 heuristics. */
2930 if (CONSTANT_P (val)
2931 && (val != const0_rtx && val != const1_rtx))
2932 return PRED_CONST_RETURN;
2934 return PRED_NO_PREDICTION;
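/* For illustration: under these heuristics, `return NULL;' from a
   pointer-valued function yields PRED_NULL_RETURN, `return -1;'
   yields PRED_NEGATIVE_RETURN, `return 2;' yields PRED_CONST_RETURN,
   and `return 0;' or `return 1;' from a scalar-valued function
   yields PRED_NO_PREDICTION.  */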
2937 /* Generate RTL to return from the current function, with value VAL. */
2939 static void
2940 expand_value_return (val)
2941 rtx val;
2943 rtx last_insn;
2944 rtx return_reg;
2945 enum br_predictor pred;
2947 if ((pred = return_prediction (val)) != PRED_NO_PREDICTION)
2949 /* Emit information for branch prediction. */
2950 rtx note;
2952 note = emit_note (NULL, NOTE_INSN_PREDICTION);
2954 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2958 last_insn = get_last_insn ();
2959 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2961 /* Copy the value to the return location
2962 unless it's already there. */
2964 if (return_reg != val)
2966 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2967 #ifdef PROMOTE_FUNCTION_RETURN
2968 int unsignedp = TREE_UNSIGNED (type);
2969 enum machine_mode old_mode
2970 = DECL_MODE (DECL_RESULT (current_function_decl));
2971 enum machine_mode mode
2972 = promote_mode (type, old_mode, &unsignedp, 1);
2974 if (mode != old_mode)
2975 val = convert_modes (mode, old_mode, val, unsignedp);
2976 #endif
2977 if (GET_CODE (return_reg) == PARALLEL)
2978 emit_group_load (return_reg, val, int_size_in_bytes (type));
2979 else
2980 emit_move_insn (return_reg, val);
2983 expand_null_return_1 (last_insn);
2986 /* Output a return with no value. If LAST_INSN is nonzero,
2987 pretend that the return takes place after LAST_INSN. */
2989 static void
2990 expand_null_return_1 (last_insn)
2991 rtx last_insn;
2993 rtx end_label = cleanup_label ? cleanup_label : return_label;
2995 clear_pending_stack_adjust ();
2996 do_pending_stack_adjust ();
2997 clear_last_expr ();
2999 if (end_label == 0)
3000 end_label = return_label = gen_label_rtx ();
3001 expand_goto_internal (NULL_TREE, end_label, last_insn);
3004 /* Generate RTL to evaluate the expression RETVAL and return it
3005 from the current function. */
3007 void
3008 expand_return (retval)
3009 tree retval;
3011 /* If there are any cleanups to be performed, then they will
3012 be inserted following LAST_INSN. It is desirable
3013 that the last_insn, for such purposes, should be the
3014 last insn before computing the return value. Otherwise, cleanups
3015 which call functions can clobber the return value. */
3016 /* ??? rms: I think that is erroneous, because in C++ it would
3017 run destructors on variables that might be used in the subsequent
3018 computation of the return value. */
3019 rtx last_insn = 0;
3020 rtx result_rtl;
3021 rtx val = 0;
3022 tree retval_rhs;
3024 /* If function wants no value, give it none. */
3025 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3027 expand_expr (retval, NULL_RTX, VOIDmode, 0);
3028 emit_queue ();
3029 expand_null_return ();
3030 return;
3033 if (retval == error_mark_node)
3035 /* Treat this like a return of no value from a function that
3036 returns a value. */
3037 expand_null_return ();
3038 return;
3040 else if (TREE_CODE (retval) == RESULT_DECL)
3041 retval_rhs = retval;
3042 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3043 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3044 retval_rhs = TREE_OPERAND (retval, 1);
3045 else if (VOID_TYPE_P (TREE_TYPE (retval)))
3046 /* Recognize tail-recursive call to void function. */
3047 retval_rhs = retval;
3048 else
3049 retval_rhs = NULL_TREE;
3051 last_insn = get_last_insn ();
3053 /* Distribute return down conditional expr if either of the sides
3054 may involve tail recursion (see test below). This enhances the number
3055 of tail recursions we see. Don't do this always since it can produce
3056 sub-optimal code in some cases and we distribute assignments into
3057 conditional expressions when it would help. */
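/* For illustration: `return p ? f (x) : g (x);' is expanded roughly
   as

     if (!p) goto L;  return f (x);  L: return g (x);

   so that a recursive call in either arm can still be turned into
   a tail-recursive jump.  */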
3059 if (optimize && retval_rhs != 0
3060 && frame_offset == 0
3061 && TREE_CODE (retval_rhs) == COND_EXPR
3062 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3063 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3065 rtx label = gen_label_rtx ();
3066 tree expr;
3068 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3069 start_cleanup_deferral ();
3070 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3071 DECL_RESULT (current_function_decl),
3072 TREE_OPERAND (retval_rhs, 1));
3073 TREE_SIDE_EFFECTS (expr) = 1;
3074 expand_return (expr);
3075 emit_label (label);
3077 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3078 DECL_RESULT (current_function_decl),
3079 TREE_OPERAND (retval_rhs, 2));
3080 TREE_SIDE_EFFECTS (expr) = 1;
3081 expand_return (expr);
3082 end_cleanup_deferral ();
3083 return;
3086 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3088 /* If the result is an aggregate that is being returned in one (or more)
3089 registers, load the registers here. The compiler currently can't handle
3090 copying a BLKmode value into registers. We could put this code in a
3091 more general area (for use by everyone instead of just function
3092 call/return), but until this feature is generally usable it is kept here
3093 (and in expand_call). The value must go into a pseudo in case there
3094 are cleanups that will clobber the real return register. */
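/* For illustration, assuming a hypothetical 32-bit target that
   returns structures in registers: a 6-byte BLKmode value is packed
   bitfield-by-bitfield into two word_mode pseudos, then moved into
   a value of the narrowest integer mode of at least 6 bytes, which
   is what the return register ends up holding.  */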
3096 if (retval_rhs != 0
3097 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3098 && GET_CODE (result_rtl) == REG)
3100 int i;
3101 unsigned HOST_WIDE_INT bitpos, xbitpos;
3102 unsigned HOST_WIDE_INT big_endian_correction = 0;
3103 unsigned HOST_WIDE_INT bytes
3104 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3105 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3106 unsigned int bitsize
3107 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3108 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3109 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3110 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3111 enum machine_mode tmpmode, result_reg_mode;
3113 if (bytes == 0)
3115 expand_null_return ();
3116 return;
3119 /* Structures whose size is not a multiple of a word are aligned
3120 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3121 machine, this means we must skip the empty high order bytes when
3122 calculating the bit offset. */
3123 if (BYTES_BIG_ENDIAN
3124 && bytes % UNITS_PER_WORD)
3125 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3126 * BITS_PER_UNIT));
3128 /* Copy the structure BITSIZE bits at a time. */
3129 for (bitpos = 0, xbitpos = big_endian_correction;
3130 bitpos < bytes * BITS_PER_UNIT;
3131 bitpos += bitsize, xbitpos += bitsize)
3133 /* We need a new destination pseudo each time xbitpos is
3134 on a word boundary and when xbitpos == big_endian_correction
3135 (the first time through). */
3136 if (xbitpos % BITS_PER_WORD == 0
3137 || xbitpos == big_endian_correction)
3139 /* Generate an appropriate register. */
3140 dst = gen_reg_rtx (word_mode);
3141 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3143 /* Clear the destination before we move anything into it. */
3144 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
3147 /* We need a new source operand each time bitpos is on a word
3148 boundary. */
3149 if (bitpos % BITS_PER_WORD == 0)
3150 src = operand_subword_force (result_val,
3151 bitpos / BITS_PER_WORD,
3152 BLKmode);
3154 /* Use bitpos for the source extraction (left justified) and
3155 xbitpos for the destination store (right justified). */
3156 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3157 extract_bit_field (src, bitsize,
3158 bitpos % BITS_PER_WORD, 1,
3159 NULL_RTX, word_mode, word_mode,
3160 BITS_PER_WORD),
3161 BITS_PER_WORD);
3164 /* Find the smallest integer mode large enough to hold the
3165 entire structure and use that mode instead of BLKmode
3166 on the USE insn for the return register. */
3167 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3168 tmpmode != VOIDmode;
3169 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3170 /* Have we found a large enough mode? */
3171 if (GET_MODE_SIZE (tmpmode) >= bytes)
3172 break;
3174 /* No suitable mode found. */
3175 if (tmpmode == VOIDmode)
3176 abort ();
3178 PUT_MODE (result_rtl, tmpmode);
3180 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3181 result_reg_mode = word_mode;
3182 else
3183 result_reg_mode = tmpmode;
3184 result_reg = gen_reg_rtx (result_reg_mode);
3186 emit_queue ();
3187 for (i = 0; i < n_regs; i++)
3188 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3189 result_pseudos[i]);
3191 if (tmpmode != result_reg_mode)
3192 result_reg = gen_lowpart (tmpmode, result_reg);
3194 expand_value_return (result_reg);
3196 else if (retval_rhs != 0
3197 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3198 && (GET_CODE (result_rtl) == REG
3199 || (GET_CODE (result_rtl) == PARALLEL)))
3201 /* Calculate the return value into a temporary (usually a pseudo
3202 reg). */
3203 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3204 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3206 val = assign_temp (nt, 0, 0, 1);
3207 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3208 val = force_not_mem (val);
3209 emit_queue ();
3210 /* Return the calculated value, doing cleanups first. */
3211 expand_value_return (val);
3213 else
3215 /* No cleanups or no hard reg used;
3216 calculate value into hard return reg. */
3217 expand_expr (retval, const0_rtx, VOIDmode, 0);
3218 emit_queue ();
3219 expand_value_return (result_rtl);
3223 /* Attempt to optimize a potential tail recursion call into a goto.
3224 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3225 where to place the jump to the tail recursion label.
3227 Return TRUE if the call was optimized into a goto. */
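/* For illustration, a hypothetical example: in

     static int sum (n, acc) int n, acc;
     { return n ? sum (n - 1, acc + n) : acc; }

   the self-call can be replaced by storing `n - 1' and `acc + n'
   into the formals and jumping to tail_recursion_label at the top
   of the function.  */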
3229 int
3230 optimize_tail_recursion (arguments, last_insn)
3231 tree arguments;
3232 rtx last_insn;
3234 /* Finish checking validity, and if valid emit code to set the
3235 argument variables for the new call. */
3236 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3238 if (tail_recursion_label == 0)
3240 tail_recursion_label = gen_label_rtx ();
3241 emit_label_after (tail_recursion_label,
3242 tail_recursion_reentry);
3244 emit_queue ();
3245 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3246 emit_barrier ();
3247 return 1;
3249 return 0;
3252 /* Emit code to alter this function's formal parms for a tail-recursive call.
3253 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3254 FORMALS is the chain of decls of formals.
3255 Return 1 if this can be done;
3256 otherwise return 0 and do not emit any code. */
3258 static int
3259 tail_recursion_args (actuals, formals)
3260 tree actuals, formals;
3262 tree a = actuals, f = formals;
3263 int i;
3264 rtx *argvec;
3266 /* Check that number and types of actuals are compatible
3267 with the formals. This is not always true in valid C code.
3268 Also check that no formal needs to be addressable
3269 and that all formals are scalars. */
3271 /* Also count the args. */
3273 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3275 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3276 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3277 return 0;
3278 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3279 return 0;
3281 if (a != 0 || f != 0)
3282 return 0;
3284 /* Compute all the actuals. */
3286 argvec = (rtx *) alloca (i * sizeof (rtx));
3288 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3289 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3291 /* Find which actual values refer to current values of previous formals.
3292 Copy each of them now, before any formal is changed. */
3294 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3296 int copy = 0;
3297 int j;
3298 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3299 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3301 copy = 1;
3302 break;
3304 if (copy)
3305 argvec[i] = copy_to_reg (argvec[i]);
3308 /* Store the values of the actuals into the formals. */
3310 for (f = formals, a = actuals, i = 0; f;
3311 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3313 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3314 emit_move_insn (DECL_RTL (f), argvec[i]);
3315 else
3317 rtx tmp = argvec[i];
3319 if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
3321 tmp = gen_reg_rtx (DECL_MODE (f));
3322 convert_move (tmp, argvec[i],
3323 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3325 convert_move (DECL_RTL (f), tmp,
3326 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3330 free_temp_slots ();
3331 return 1;
3334 /* Generate the RTL code for entering a binding contour.
3335 The variables are declared one by one, by calls to `expand_decl'.
3337 FLAGS is a bitwise or of the following flags:
3339 1 - Nonzero if this construct should be visible to
3340 `exit_something'.
3342 2 - Nonzero if this contour does not require a
3343 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3344 language-independent code should set this flag because they
3345 will not create corresponding BLOCK nodes. (There should be
3346 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3347 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3348 when expand_end_bindings is called.
3350 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3351 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3352 note. */
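/* For illustration: front ends reach this through the
   expand_start_bindings macro, which passes a null BLOCK;
   expand_start_bindings (0) opens a contour marked with a
   NOTE_INSN_BLOCK_BEG note, while expand_start_bindings (2), as in
   expand_start_target_temps below, opens an unmarked contour that
   must be closed with MARK_ENDS zero.  */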
3354 void
3355 expand_start_bindings_and_block (flags, block)
3356 int flags;
3357 tree block;
3359 struct nesting *thisblock = ALLOC_NESTING ();
3360 rtx note;
3361 int exit_flag = ((flags & 1) != 0);
3362 int block_flag = ((flags & 2) == 0);
3364 /* If a BLOCK is supplied, then the caller should be requesting a
3365 NOTE_INSN_BLOCK_BEG note. */
3366 if (!block_flag && block)
3367 abort ();
3369 /* Create a note to mark the beginning of the block. */
3370 if (block_flag)
3372 note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
3373 NOTE_BLOCK (note) = block;
3375 else
3376 note = emit_note (NULL, NOTE_INSN_DELETED);
3378 /* Make an entry on block_stack for the block we are entering. */
3380 thisblock->desc = BLOCK_NESTING;
3381 thisblock->next = block_stack;
3382 thisblock->all = nesting_stack;
3383 thisblock->depth = ++nesting_depth;
3384 thisblock->data.block.stack_level = 0;
3385 thisblock->data.block.cleanups = 0;
3386 thisblock->data.block.n_function_calls = 0;
3387 thisblock->data.block.exception_region = 0;
3388 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3390 thisblock->data.block.conditional_code = 0;
3391 thisblock->data.block.last_unconditional_cleanup = note;
3392 /* When we insert instructions after the last unconditional cleanup,
3393 we don't adjust last_insn. That means that a later add_insn will
3394 clobber the instructions we've just added. The easiest way to
3395 fix this is to just insert another instruction here, so that the
3396 instructions inserted after the last unconditional cleanup are
3397 never the last instruction. */
3398 emit_note (NULL, NOTE_INSN_DELETED);
3400 if (block_stack
3401 && !(block_stack->data.block.cleanups == NULL_TREE
3402 && block_stack->data.block.outer_cleanups == NULL_TREE))
3403 thisblock->data.block.outer_cleanups
3404 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3405 block_stack->data.block.outer_cleanups);
3406 else
3407 thisblock->data.block.outer_cleanups = 0;
3408 thisblock->data.block.label_chain = 0;
3409 thisblock->data.block.innermost_stack_block = stack_block_stack;
3410 thisblock->data.block.first_insn = note;
3411 thisblock->data.block.block_start_count = ++current_block_start_count;
3412 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3413 block_stack = thisblock;
3414 nesting_stack = thisblock;
3416 /* Make a new level for allocating stack slots. */
3417 push_temp_slots ();
3420 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3421 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3422 expand_expr are made. After we end the region, we know that all
3423 space for all temporaries that were created by TARGET_EXPRs will be
3424 destroyed and their space freed for reuse. */
3426 void
3427 expand_start_target_temps ()
3429 /* This is so that even if the result is preserved, the space
3430 allocated will be freed, as we know that it is no longer in use. */
3431 push_temp_slots ();
3433 /* Start a new binding layer that will keep track of all cleanup
3434 actions to be performed. */
3435 expand_start_bindings (2);
3437 target_temp_slot_level = temp_slot_level;
3440 void
3441 expand_end_target_temps ()
3443 expand_end_bindings (NULL_TREE, 0, 0);
3445 /* This is so that even if the result is preserved, the space
3446 allocated will be freed, as we know that it is no longer in use. */
3447 pop_temp_slots ();
3450 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
3451 in question represents the outermost pair of curly braces (i.e. the "body
3452 block") of a function or method.
3454 For any BLOCK node representing a "body block" of a function or method, the
3455 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3456 represents the outermost (function) scope for the function or method (i.e.
3457 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3458 *that* node in turn will point to the relevant FUNCTION_DECL node. */
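/* For illustration, a hypothetical example: in

     int f (a) int a; { int x = a; return x; }

   the BLOCK for `{ int x = a; ... }' is the body block: its
   BLOCK_SUPERCONTEXT is the function-scope BLOCK containing `a',
   whose BLOCK_SUPERCONTEXT in turn is the FUNCTION_DECL for `f'.  */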
3460 int
3461 is_body_block (stmt)
3462 tree stmt;
3464 if (TREE_CODE (stmt) == BLOCK)
3466 tree parent = BLOCK_SUPERCONTEXT (stmt);
3468 if (parent && TREE_CODE (parent) == BLOCK)
3470 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3472 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3473 return 1;
3477 return 0;
3480 /* True if we are currently emitting insns in an area of output code
3481 that is controlled by a conditional expression. This is used by
3482 the cleanup handling code to generate conditional cleanup actions. */
3484 int
3485 conditional_context ()
3487 return block_stack && block_stack->data.block.conditional_code;
3490 /* Return an opaque pointer to the current nesting level, so frontend code
3491 can check its own sanity. */
3493 struct nesting *
3494 current_nesting_level ()
3496 return cfun ? block_stack : 0;
3499 /* Emit a handler label for a nonlocal goto handler.
3500 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3502 static rtx
3503 expand_nl_handler_label (slot, before_insn)
3504 rtx slot, before_insn;
3506 rtx insns;
3507 rtx handler_label = gen_label_rtx ();
3509 /* Don't let cleanup_cfg delete the handler. */
3510 LABEL_PRESERVE_P (handler_label) = 1;
3512 start_sequence ();
3513 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3514 insns = get_insns ();
3515 end_sequence ();
3516 emit_insn_before (insns, before_insn);
3518 emit_label (handler_label);
3520 return handler_label;
3523 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3524 handler. */
3525 static void
3526 expand_nl_goto_receiver ()
3528 #ifdef HAVE_nonlocal_goto
3529 if (! HAVE_nonlocal_goto)
3530 #endif
3531 /* First adjust our frame pointer to its actual value. It was
3532 previously set to the start of the virtual area corresponding to
3533 the stacked variables when we branched here and now needs to be
3534 adjusted to the actual hardware fp value.
3536 Assignments to virtual registers are converted by
3537 instantiate_virtual_regs into the corresponding assignment
3538 to the underlying register (fp in this case) that makes
3539 the original assignment true.
3540 So the following insn will actually be
3541 decrementing fp by STARTING_FRAME_OFFSET. */
3542 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3544 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3545 if (fixed_regs[ARG_POINTER_REGNUM])
3547 #ifdef ELIMINABLE_REGS
3548 /* If the argument pointer can be eliminated in favor of the
3549 frame pointer, we don't need to restore it. We assume here
3550 that if such an elimination is present, it can always be used.
3551 This is the case on all known machines; if we don't make this
3552 assumption, we do unnecessary saving on many machines. */
3553 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3554 size_t i;
3556 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3557 if (elim_regs[i].from == ARG_POINTER_REGNUM
3558 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3559 break;
3561 if (i == ARRAY_SIZE (elim_regs))
3562 #endif
3564 /* Now restore our arg pointer from the address at which it
3565 was saved in our stack frame. */
3566 emit_move_insn (virtual_incoming_args_rtx,
3567 copy_to_reg (get_arg_pointer_save_area (cfun)));
3570 #endif
3572 #ifdef HAVE_nonlocal_goto_receiver
3573 if (HAVE_nonlocal_goto_receiver)
3574 emit_insn (gen_nonlocal_goto_receiver ());
3575 #endif
3578 /* Make handlers for nonlocal gotos taking place in the function calls in
3579 block THISBLOCK. */
3581 static void
3582 expand_nl_goto_receivers (thisblock)
3583 struct nesting *thisblock;
3585 tree link;
3586 rtx afterward = gen_label_rtx ();
3587 rtx insns, slot;
3588 rtx label_list;
3589 int any_invalid;
3591 /* Record the handler address in the stack slot for that purpose,
3592 during this block, saving and restoring the outer value. */
3593 if (thisblock->next != 0)
3594 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3596 rtx save_receiver = gen_reg_rtx (Pmode);
3597 emit_move_insn (XEXP (slot, 0), save_receiver);
3599 start_sequence ();
3600 emit_move_insn (save_receiver, XEXP (slot, 0));
3601 insns = get_insns ();
3602 end_sequence ();
3603 emit_insn_before (insns, thisblock->data.block.first_insn);
3606 /* Jump around the handlers; they run only when specially invoked. */
3607 emit_jump (afterward);
3609 /* Make a separate handler for each label. */
3610 link = nonlocal_labels;
3611 slot = nonlocal_goto_handler_slots;
3612 label_list = NULL_RTX;
3613 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3614 /* Skip any labels we shouldn't be able to jump to from here;
3615 we generate one special handler for all of them below, which
3616 just traps. */
3617 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3619 rtx lab;
3620 lab = expand_nl_handler_label (XEXP (slot, 0),
3621 thisblock->data.block.first_insn);
3622 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3624 expand_nl_goto_receiver ();
3626 /* Jump to the "real" nonlocal label. */
3627 expand_goto (TREE_VALUE (link));
3630 /* A second pass over all nonlocal labels; this time we handle those
3631 we should not be able to jump to at this point. */
3632 link = nonlocal_labels;
3633 slot = nonlocal_goto_handler_slots;
3634 any_invalid = 0;
3635 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3636 if (DECL_TOO_LATE (TREE_VALUE (link)))
3638 rtx lab;
3639 lab = expand_nl_handler_label (XEXP (slot, 0),
3640 thisblock->data.block.first_insn);
3641 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3642 any_invalid = 1;
3645 if (any_invalid)
3647 expand_nl_goto_receiver ();
3648 expand_builtin_trap ();
3651 nonlocal_goto_handler_labels = label_list;
3652 emit_label (afterward);
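/* Illustrative example, not part of the original source: the GNU C
   construct these receivers serve is a nested function jumping to a
   label of its containing function.

     int
     f (void)
     {
       void inner (void) { goto out; }    -- a nonlocal goto
       inner ();
       return 0;
      out:
       return 1;                          -- reached through a receiver
     }
*/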
3655 /* Warn about any unused VARS (which may contain nodes other than
3656 VAR_DECLs, but such nodes are ignored). The nodes are connected
3657 via the TREE_CHAIN field. */
3659 void
3660 warn_about_unused_variables (vars)
3661 tree vars;
3663 tree decl;
3665 if (warn_unused_variable)
3666 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3667 if (TREE_CODE (decl) == VAR_DECL
3668 && ! TREE_USED (decl)
3669 && ! DECL_IN_SYSTEM_HEADER (decl)
3670 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3671 warning_with_decl (decl, "unused variable `%s'");
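/* Illustrative example, not part of the original source: under
   -Wunused-variable, the local below passes every test in the loop
   above (a named, user-written VAR_DECL that was never marked
   TREE_USED), so it is reported when its binding contour ends.

     void
     g (void)
     {
       int unused;                        -- warning: unused variable `unused'
     }
*/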
3674 /* Generate RTL code to terminate a binding contour.
3676 VARS is the chain of VAR_DECL nodes for the variables bound in this
3677 contour. There may actually be other nodes in this chain, but any
3678 nodes other than VAR_DECLS are ignored.
3680 MARK_ENDS is nonzero if we should put a note at the beginning
3681 and end of this binding contour.
3683 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3684 (That is true automatically if the contour has a saved stack level.) */
3686 void
3687 expand_end_bindings (vars, mark_ends, dont_jump_in)
3688 tree vars;
3689 int mark_ends;
3690 int dont_jump_in;
3692 struct nesting *thisblock = block_stack;
3694 /* If any of the variables in this scope were not used, warn the
3695 user. */
3696 warn_about_unused_variables (vars);
3698 if (thisblock->exit_label)
3700 do_pending_stack_adjust ();
3701 emit_label (thisblock->exit_label);
3704 /* If necessary, make handlers for nonlocal gotos taking
3705 place in the function calls in this block. */
3706 if (function_call_count != thisblock->data.block.n_function_calls
3707 && nonlocal_labels
3708 /* Make handler for outermost block
3709 if there were any nonlocal gotos to this function. */
3710 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3711 /* Make handler for inner block if it has something
3712 special to do when you jump out of it. */
3713 : (thisblock->data.block.cleanups != 0
3714 || thisblock->data.block.stack_level != 0)))
3715 expand_nl_goto_receivers (thisblock);
3717 /* Don't allow jumping into a block that has a stack level.
3718 Cleanups are allowed, though. */
3719 if (dont_jump_in
3720 || thisblock->data.block.stack_level != 0)
3722 struct label_chain *chain;
3724 /* Any labels in this block are no longer valid to go to.
3725 Mark them to cause an error message. */
3726 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3728 DECL_TOO_LATE (chain->label) = 1;
3729 /* If any goto without a fixup came to this label,
3730 that must be an error, because gotos without fixups
3731 come from outside all saved stack-levels. */
3732 if (TREE_ADDRESSABLE (chain->label))
3733 error_with_decl (chain->label,
3734 "label `%s' used before containing binding contour");
3738 /* Restore stack level in effect before the block
3739 (only if variable-size objects allocated). */
3740 /* Perform any cleanups associated with the block. */
3742 if (thisblock->data.block.stack_level != 0
3743 || thisblock->data.block.cleanups != 0)
3745 int reachable;
3746 rtx insn;
3748 /* Don't let cleanups affect ({...}) constructs. */
3749 int old_expr_stmts_for_value = expr_stmts_for_value;
3750 rtx old_last_expr_value = last_expr_value;
3751 tree old_last_expr_type = last_expr_type;
3752 expr_stmts_for_value = 0;
3754 /* Only clean up here if this point can actually be reached. */
3755 insn = get_last_insn ();
3756 if (GET_CODE (insn) == NOTE)
3757 insn = prev_nonnote_insn (insn);
3758 reachable = (! insn || GET_CODE (insn) != BARRIER);
3760 /* Do the cleanups. */
3761 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3762 if (reachable)
3763 do_pending_stack_adjust ();
3765 expr_stmts_for_value = old_expr_stmts_for_value;
3766 last_expr_value = old_last_expr_value;
3767 last_expr_type = old_last_expr_type;
3769 /* Restore the stack level. */
3771 if (reachable && thisblock->data.block.stack_level != 0)
3773 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3774 thisblock->data.block.stack_level, NULL_RTX);
3775 if (nonlocal_goto_handler_slots != 0)
3776 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3777 NULL_RTX);
3780 /* Any gotos out of this block must also do these things.
3781 Also report any gotos with fixups that came to labels in this
3782 level. */
3783 fixup_gotos (thisblock,
3784 thisblock->data.block.stack_level,
3785 thisblock->data.block.cleanups,
3786 thisblock->data.block.first_insn,
3787 dont_jump_in);
3790 /* Mark the beginning and end of the scope if requested.
3791 We do this now, after running cleanups on the variables
3792 just going out of scope, so they are in scope for their cleanups. */
3794 if (mark_ends)
3796 rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
3797 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3799 else
3800 /* Get rid of the beginning-mark if we don't make an end-mark. */
3801 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3803 /* Restore the temporary level of TARGET_EXPRs. */
3804 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3806 /* Restore block_stack level for containing block. */
3808 stack_block_stack = thisblock->data.block.innermost_stack_block;
3809 POPSTACK (block_stack);
3811 /* Pop the stack slot nesting and free any slots at this level. */
3812 pop_temp_slots ();
3815 /* Generate code to save the stack pointer at the start of the current block
3816 and set up to restore it on exit. */
3818 void
3819 save_stack_pointer ()
3821 struct nesting *thisblock = block_stack;
3823 if (thisblock->data.block.stack_level == 0)
3825 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3826 &thisblock->data.block.stack_level,
3827 thisblock->data.block.first_insn);
3828 stack_block_stack = thisblock;
3832 /* Generate RTL for the automatic variable declaration DECL.
3833 (Other kinds of declarations are simply ignored if seen here.) */
3835 void
3836 expand_decl (decl)
3837 tree decl;
3839 tree type;
3841 type = TREE_TYPE (decl);
3843 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3844 type in case this node is used in a reference. */
3845 if (TREE_CODE (decl) == CONST_DECL)
3847 DECL_MODE (decl) = TYPE_MODE (type);
3848 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3849 DECL_SIZE (decl) = TYPE_SIZE (type);
3850 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3851 return;
3854 /* Otherwise, only automatic variables need any expansion done. Static and
3855 external variables, and external functions, will be handled by
3856 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3857 nothing. PARM_DECLs are handled in `assign_parms'. */
3858 if (TREE_CODE (decl) != VAR_DECL)
3859 return;
3861 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3862 return;
3864 /* Create the RTL representation for the variable. */
3866 if (type == error_mark_node)
3867 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3869 else if (DECL_SIZE (decl) == 0)
3870 /* Variable with incomplete type. */
3872 rtx x;
3873 if (DECL_INITIAL (decl) == 0)
3874 /* Error message was already done; now avoid a crash. */
3875 x = gen_rtx_MEM (BLKmode, const0_rtx);
3876 else
3877 /* An initializer is going to decide the size of this array.
3878 Until we know the size, represent its address with a reg. */
3879 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3881 set_mem_attributes (x, decl, 1);
3882 SET_DECL_RTL (decl, x);
3884 else if (DECL_MODE (decl) != BLKmode
3885 /* If -ffloat-store, don't put explicit float vars
3886 into regs. */
3887 && !(flag_float_store
3888 && TREE_CODE (type) == REAL_TYPE)
3889 && ! TREE_THIS_VOLATILE (decl)
3890 && (DECL_REGISTER (decl) || optimize))
3892 /* Automatic variable that can go in a register. */
3893 int unsignedp = TREE_UNSIGNED (type);
3894 enum machine_mode reg_mode
3895 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3897 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3899 if (GET_CODE (DECL_RTL (decl)) == REG)
3900 REGNO_DECL (REGNO (DECL_RTL (decl))) = decl;
3901 else if (GET_CODE (DECL_RTL (decl)) == CONCAT)
3903 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 0))) = decl;
3904 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 1))) = decl;
3907 mark_user_reg (DECL_RTL (decl));
3909 if (POINTER_TYPE_P (type))
3910 mark_reg_pointer (DECL_RTL (decl),
3911 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3913 maybe_set_unchanging (DECL_RTL (decl), decl);
3915 /* If something wants our address, try to use ADDRESSOF. */
3916 if (TREE_ADDRESSABLE (decl))
3917 put_var_into_stack (decl);
3920 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3921 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3922 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3923 STACK_CHECK_MAX_VAR_SIZE)))
3925 /* Variable of fixed size that goes on the stack. */
3926 rtx oldaddr = 0;
3927 rtx addr;
3928 rtx x;
3930 /* If we previously made RTL for this decl, it must be an array
3931 whose size was determined by the initializer.
3932 The old address was a register; set that register now
3933 to the proper address. */
3934 if (DECL_RTL_SET_P (decl))
3936 if (GET_CODE (DECL_RTL (decl)) != MEM
3937 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3938 abort ();
3939 oldaddr = XEXP (DECL_RTL (decl), 0);
3942 /* Set alignment we actually gave this decl. */
3943 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3944 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3945 DECL_USER_ALIGN (decl) = 0;
3947 x = assign_temp (decl, 1, 1, 1);
3948 set_mem_attributes (x, decl, 1);
3949 SET_DECL_RTL (decl, x);
3951 if (oldaddr)
3953 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3954 if (addr != oldaddr)
3955 emit_move_insn (oldaddr, addr);
3958 else
3959 /* Dynamic-size object: must push space on the stack. */
3961 rtx address, size, x;
3963 /* Record the stack pointer on entry to the block, if we have
3964 not already done so. */
3965 do_pending_stack_adjust ();
3966 save_stack_pointer ();
3968 /* In function-at-a-time mode, variable_size doesn't expand this,
3969 so do it now. */
3970 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3971 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3972 const0_rtx, VOIDmode, 0);
3974 /* Compute the variable's size, in bytes. */
3975 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3976 free_temp_slots ();
3978 /* Allocate space on the stack for the variable. Note that
3979 DECL_ALIGN says how the variable is to be aligned and we
3980 cannot use it to conclude anything about the alignment of
3981 the size. */
3982 address = allocate_dynamic_stack_space (size, NULL_RTX,
3983 TYPE_ALIGN (TREE_TYPE (decl)));
3985 /* Reference the variable indirectly through that rtx. */
3986 x = gen_rtx_MEM (DECL_MODE (decl), address);
3987 set_mem_attributes (x, decl, 1);
3988 SET_DECL_RTL (decl, x);
3991 /* Indicate the alignment we actually gave this variable. */
3992 #ifdef STACK_BOUNDARY
3993 DECL_ALIGN (decl) = STACK_BOUNDARY;
3994 #else
3995 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3996 #endif
3997 DECL_USER_ALIGN (decl) = 0;
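/* Illustrative sketch, not part of the original source: the main
   paths through expand_decl above, in GNU C terms (the register case
   assumes optimization is enabled or the decl is marked `register').

     void
     h (int n)
     {
       int i;                 -- register candidate: the promote_mode /
                                 gen_reg_rtx path
       int buf[16];           -- fixed size: assign_temp gives a stack slot
       char vla[n];           -- dynamic size: allocate_dynamic_stack_space
     }
*/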
4001 /* Emit code to perform the initialization of a declaration DECL. */
4003 void
4004 expand_decl_init (decl)
4005 tree decl;
4007 int was_used = TREE_USED (decl);
4009 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
4010 for static decls. */
4011 if (TREE_CODE (decl) == CONST_DECL
4012 || TREE_STATIC (decl))
4013 return;
4015 /* Compute and store the initial value now. */
4017 if (DECL_INITIAL (decl) == error_mark_node)
4019 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
4021 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
4022 || code == POINTER_TYPE || code == REFERENCE_TYPE)
4023 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4024 0, 0);
4025 emit_queue ();
4027 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4029 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
4030 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
4031 emit_queue ();
4034 /* Don't let the initialization count as "using" the variable. */
4035 TREE_USED (decl) = was_used;
4037 /* Free any temporaries we made while initializing the decl. */
4038 preserve_temp_slots (NULL_RTX);
4039 free_temp_slots ();
4042 /* CLEANUP is an expression to be executed at exit from this binding contour;
4043 for example, in C++, it might call the destructor for this variable.
4045 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4046 CLEANUP multiple times, and have the correct semantics. This
4047 happens in exception handling, for gotos, returns, and breaks that
4048 leave the current scope.
4050 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4051 that is not associated with any particular variable. */
4054 expand_decl_cleanup (decl, cleanup)
4055 tree decl, cleanup;
4057 struct nesting *thisblock;
4059 /* Error if we are not in any block. */
4060 if (cfun == 0 || block_stack == 0)
4061 return 0;
4063 thisblock = block_stack;
4065 /* Record the cleanup if there is one. */
4067 if (cleanup != 0)
4069 tree t;
4070 rtx seq;
4071 tree *cleanups = &thisblock->data.block.cleanups;
4072 int cond_context = conditional_context ();
4074 if (cond_context)
4076 rtx flag = gen_reg_rtx (word_mode);
4077 rtx set_flag_0;
4078 tree cond;
4080 start_sequence ();
4081 emit_move_insn (flag, const0_rtx);
4082 set_flag_0 = get_insns ();
4083 end_sequence ();
4085 thisblock->data.block.last_unconditional_cleanup
4086 = emit_insn_after (set_flag_0,
4087 thisblock->data.block.last_unconditional_cleanup);
4089 emit_move_insn (flag, const1_rtx);
4091 cond = build_decl (VAR_DECL, NULL_TREE,
4092 (*lang_hooks.types.type_for_mode) (word_mode, 1));
4093 SET_DECL_RTL (cond, flag);
4095 /* Conditionalize the cleanup. */
4096 cleanup = build (COND_EXPR, void_type_node,
4097 (*lang_hooks.truthvalue_conversion) (cond),
4098 cleanup, integer_zero_node);
4099 cleanup = fold (cleanup);
4101 cleanups = &thisblock->data.block.cleanups;
4104 cleanup = unsave_expr (cleanup);
4106 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4108 if (! cond_context)
4109 /* If this block has a cleanup, it belongs in stack_block_stack. */
4110 stack_block_stack = thisblock;
4112 if (cond_context)
4114 start_sequence ();
4117 if (! using_eh_for_cleanups_p)
4118 TREE_ADDRESSABLE (t) = 1;
4119 else
4120 expand_eh_region_start ();
4122 if (cond_context)
4124 seq = get_insns ();
4125 end_sequence ();
4126 if (seq)
4127 thisblock->data.block.last_unconditional_cleanup
4128 = emit_insn_after (seq,
4129 thisblock->data.block.last_unconditional_cleanup);
4131 else
4133 thisblock->data.block.last_unconditional_cleanup
4134 = get_last_insn ();
4135 /* When we insert instructions after the last unconditional cleanup,
4136 we don't adjust last_insn. That means that a later add_insn will
4137 clobber the instructions we've just added. The easiest way to
4138 fix this is to just insert another instruction here, so that the
4139 instructions inserted after the last unconditional cleanup are
4140 never the last instruction. */
4141 emit_note (NULL, NOTE_INSN_DELETED);
4144 return 1;
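/* Illustrative sketch, not part of the original source: for a cleanup
   registered inside a conditional, the flag scheme above produces code
   roughly equivalent to this GNU C.

     {
       int flag = 0;          -- set_flag_0, inserted at the last
                                 unconditional point in the block
       if (cond)
         {
           flag = 1;          -- emitted where the cleanup is registered
           ...
         }
       ...
     }                        -- at block exit the conditionalized cleanup
                                 runs as: if (flag) cleanup;
*/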
4147 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
4148 is thrown. */
4151 expand_decl_cleanup_eh (decl, cleanup, eh_only)
4152 tree decl, cleanup;
4153 int eh_only;
4155 int ret = expand_decl_cleanup (decl, cleanup);
4156 if (cleanup && ret)
4158 tree node = block_stack->data.block.cleanups;
4159 CLEANUP_EH_ONLY (node) = eh_only;
4161 return ret;
4164 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4165 DECL_ELTS is the list of elements that belong to DECL's type.
4166 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4168 void
4169 expand_anon_union_decl (decl, cleanup, decl_elts)
4170 tree decl, cleanup, decl_elts;
4172 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4173 rtx x;
4174 tree t;
4176 /* If any of the elements are addressable, so is the entire union. */
4177 for (t = decl_elts; t; t = TREE_CHAIN (t))
4178 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4180 TREE_ADDRESSABLE (decl) = 1;
4181 break;
4184 expand_decl (decl);
4185 expand_decl_cleanup (decl, cleanup);
4186 x = DECL_RTL (decl);
4188 /* Go through the elements, assigning RTL to each. */
4189 for (t = decl_elts; t; t = TREE_CHAIN (t))
4191 tree decl_elt = TREE_VALUE (t);
4192 tree cleanup_elt = TREE_PURPOSE (t);
4193 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4195 /* If any of the elements are used, so is the entire
4196 union. */
4197 if (TREE_USED (decl_elt))
4198 TREE_USED (decl) = 1;
4200 /* Propagate the union's alignment to the elements. */
4201 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4202 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4204 /* If the element has BLKmode and the union doesn't, the union is
4205 aligned such that the element doesn't need to have BLKmode, so
4206 change the element's mode to the appropriate one for its size. */
4207 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4208 DECL_MODE (decl_elt) = mode
4209 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4211 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4212 instead create a new MEM rtx with the proper mode. */
4213 if (GET_CODE (x) == MEM)
4215 if (mode == GET_MODE (x))
4216 SET_DECL_RTL (decl_elt, x);
4217 else
4218 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4220 else if (GET_CODE (x) == REG)
4222 if (mode == GET_MODE (x))
4223 SET_DECL_RTL (decl_elt, x);
4224 else
4225 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4227 else
4228 abort ();
4230 /* Record the cleanup if there is one. */
4232 if (cleanup != 0)
4233 thisblock->data.block.cleanups
4234 = tree_cons (decl_elt, cleanup_elt,
4235 thisblock->data.block.cleanups);
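/* Illustrative example, not part of the original source: a C++-style
   anonymous union.  DECL is the union object itself, DECL_ELTS carries
   one VAR_DECL per member, and the MEM/REG cases above give each member
   RTL that aliases the union's storage.

     void
     k (void)
     {
       union { int i; float f; };         -- anonymous union
       i = 42;                            -- i and f share DECL's RTL
     }
*/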
4239 /* Expand a list of cleanups LIST.
4240 Elements may be expressions or may be nested lists.
4242 If DONT_DO is nonnull, then any list-element
4243 whose TREE_PURPOSE matches DONT_DO is omitted.
4244 This is sometimes used to avoid a cleanup associated with
4245 a value that is being returned out of the scope.
4247 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
4248 goto and handle protection regions specially in that case.
4250 If REACHABLE is nonzero, we emit code; otherwise we just inform the
4251 exception handling code about this finalization. */
4253 static void
4254 expand_cleanups (list, dont_do, in_fixup, reachable)
4255 tree list;
4256 tree dont_do;
4257 int in_fixup;
4258 int reachable;
4260 tree tail;
4261 for (tail = list; tail; tail = TREE_CHAIN (tail))
4262 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4264 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4265 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4266 else
4268 if (! in_fixup && using_eh_for_cleanups_p)
4269 expand_eh_region_end_cleanup (TREE_VALUE (tail));
4271 if (reachable && !CLEANUP_EH_ONLY (tail))
4273 /* Cleanups may be run multiple times. For example,
4274 when exiting a binding contour, we expand the
4275 cleanups associated with that contour. When a goto
4276 within that binding contour has a target outside that
4277 contour, it will expand all cleanups from its scope to
4278 the target. Though the cleanups are expanded multiple
4279 times, the control paths are non-overlapping so the
4280 cleanups will not be executed twice. */
4282 /* We may need to protect from outer cleanups. */
4283 if (in_fixup && using_eh_for_cleanups_p)
4285 expand_eh_region_start ();
4287 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4289 expand_eh_region_end_fixup (TREE_VALUE (tail));
4291 else
4292 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4294 free_temp_slots ();
4300 /* Mark the context we are emitting RTL for as a conditional
4301 context, so that any cleanup actions we register with
4302 expand_decl_init will be properly conditionalized when those
4303 cleanup actions are later performed. Must be called before any
4304 expression (tree) is expanded that is within a conditional context. */
4306 void
4307 start_cleanup_deferral ()
4309 /* block_stack can be NULL if we are inside the parameter list. It is
4310 OK to do nothing, because cleanups aren't possible here. */
4311 if (block_stack)
4312 ++block_stack->data.block.conditional_code;
4315 /* Mark the end of a conditional region of code. Because cleanup
4316 deferrals may be nested, we may still be in a conditional region
4317 after we end the currently deferred cleanups; only after we end all
4318 deferred cleanups are we back in unconditional code. */
4320 void
4321 end_cleanup_deferral ()
4323 /* block_stack can be NULL if we are inside the parameter list. It is
4324 OK to do nothing, because cleanups aren't possible here. */
4325 if (block_stack)
4326 --block_stack->data.block.conditional_code;
4329 tree
4330 last_cleanup_this_contour ()
4332 if (block_stack == 0)
4333 return 0;
4335 return block_stack->data.block.cleanups;
4338 /* Return 1 if there are any pending cleanups at this point.
4339 If THIS_CONTOUR is nonzero, check the current contour as well.
4340 Otherwise, look only at the contours that enclose this one. */
4343 any_pending_cleanups (this_contour)
4344 int this_contour;
4346 struct nesting *block;
4348 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4349 return 0;
4351 if (this_contour && block_stack->data.block.cleanups != NULL)
4352 return 1;
4353 if (block_stack->data.block.cleanups == 0
4354 && block_stack->data.block.outer_cleanups == 0)
4355 return 0;
4357 for (block = block_stack->next; block; block = block->next)
4358 if (block->data.block.cleanups != 0)
4359 return 1;
4361 return 0;
4364 /* Enter a case (Pascal) or switch (C) statement.
4365 Push a block onto case_stack and nesting_stack
4366 to accumulate the case-labels that are seen
4367 and to record the labels generated for the statement.
4369 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4370 Otherwise, this construct is transparent for `exit_something'.
4372 EXPR is the index-expression to be dispatched on.
4373 TYPE is its nominal type. We could simply convert EXPR to this type,
4374 but instead we take short cuts. */
4376 void
4377 expand_start_case (exit_flag, expr, type, printname)
4378 int exit_flag;
4379 tree expr;
4380 tree type;
4381 const char *printname;
4383 struct nesting *thiscase = ALLOC_NESTING ();
4385 /* Make an entry on case_stack for the case we are entering. */
4387 thiscase->desc = CASE_NESTING;
4388 thiscase->next = case_stack;
4389 thiscase->all = nesting_stack;
4390 thiscase->depth = ++nesting_depth;
4391 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4392 thiscase->data.case_stmt.case_list = 0;
4393 thiscase->data.case_stmt.index_expr = expr;
4394 thiscase->data.case_stmt.nominal_type = type;
4395 thiscase->data.case_stmt.default_label = 0;
4396 thiscase->data.case_stmt.printname = printname;
4397 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4398 case_stack = thiscase;
4399 nesting_stack = thiscase;
4401 do_pending_stack_adjust ();
4403 /* Make sure case_stmt.start points to something that won't
4404 need any transformation before expand_end_case. */
4405 if (GET_CODE (get_last_insn ()) != NOTE)
4406 emit_note (NULL, NOTE_INSN_DELETED);
4408 thiscase->data.case_stmt.start = get_last_insn ();
4410 start_cleanup_deferral ();
4413 /* Start a "dummy case statement" within which case labels are invalid
4414 and are not connected to any larger real case statement.
4415 This can be used if you don't want to let a case statement jump
4416 into the middle of certain kinds of constructs. */
4418 void
4419 expand_start_case_dummy ()
4421 struct nesting *thiscase = ALLOC_NESTING ();
4423 /* Make an entry on case_stack for the dummy. */
4425 thiscase->desc = CASE_NESTING;
4426 thiscase->next = case_stack;
4427 thiscase->all = nesting_stack;
4428 thiscase->depth = ++nesting_depth;
4429 thiscase->exit_label = 0;
4430 thiscase->data.case_stmt.case_list = 0;
4431 thiscase->data.case_stmt.start = 0;
4432 thiscase->data.case_stmt.nominal_type = 0;
4433 thiscase->data.case_stmt.default_label = 0;
4434 case_stack = thiscase;
4435 nesting_stack = thiscase;
4436 start_cleanup_deferral ();
4439 static void
4440 check_seenlabel ()
4442 /* If this is the first label, warn if any insns have been emitted. */
4443 if (case_stack->data.case_stmt.line_number_status >= 0)
4445 rtx insn;
4447 restore_line_number_status
4448 (case_stack->data.case_stmt.line_number_status);
4449 case_stack->data.case_stmt.line_number_status = -1;
4451 for (insn = case_stack->data.case_stmt.start;
4452 insn;
4453 insn = NEXT_INSN (insn))
4455 if (GET_CODE (insn) == CODE_LABEL)
4456 break;
4457 if (GET_CODE (insn) != NOTE
4458 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4461 insn = PREV_INSN (insn);
4462 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4464 /* If insn is zero, then there must have been a syntax error. */
4465 if (insn)
4466 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4467 NOTE_LINE_NUMBER (insn),
4468 "unreachable code at beginning of %s",
4469 case_stack->data.case_stmt.printname);
4470 break;
4476 /* Accumulate one case or default label inside a case or switch statement.
4477 VALUE is the value of the case (a null pointer, for a default label).
4478 The function CONVERTER, when applied to arguments T and V,
4479 converts the value V to the type T.
4481 If not currently inside a case or switch statement, return 1 and do
4482 nothing. The caller will print a language-specific error message.
4483 If VALUE is a duplicate or overlaps, return 2 and do nothing
4484 except store the (first) duplicate node in *DUPLICATE.
4485 If VALUE is out of range, return 3 and do nothing.
4486 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4487 Return 0 on success.
4489 Extended to handle range statements. */
4492 pushcase (value, converter, label, duplicate)
4493 tree value;
4494 tree (*converter) PARAMS ((tree, tree));
4495 tree label;
4496 tree *duplicate;
4498 tree index_type;
4499 tree nominal_type;
4501 /* Fail if not inside a real case statement. */
4502 if (! (case_stack && case_stack->data.case_stmt.start))
4503 return 1;
4505 if (stack_block_stack
4506 && stack_block_stack->depth > case_stack->depth)
4507 return 5;
4509 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4510 nominal_type = case_stack->data.case_stmt.nominal_type;
4512 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4513 if (index_type == error_mark_node)
4514 return 0;
4516 /* Convert VALUE to the type in which the comparisons are nominally done. */
4517 if (value != 0)
4518 value = (*converter) (nominal_type, value);
4520 check_seenlabel ();
4522 /* Fail if this value is out of range for the actual type of the index
4523 (which may be narrower than NOMINAL_TYPE). */
4524 if (value != 0
4525 && (TREE_CONSTANT_OVERFLOW (value)
4526 || ! int_fits_type_p (value, index_type)))
4527 return 3;
4529 return add_case_node (value, value, label, duplicate);
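/* Illustrative sketch, not part of the original source: how a front end
   might drive pushcase and map its return codes to diagnostics.
   `convert' is the language's converter hook and the message texts are
   placeholders, not GCC's real wording.

     tree duplicate;
     switch (pushcase (value, convert, label, &duplicate))
       {
       case 0: break;                     -- success
       case 1: error ("case label not within a switch statement"); break;
       case 2: error ("duplicate case value");      -- see *duplicate
         break;
       case 3: error ("case label value out of range"); break;
       case 5: error ("case label within scope of cleanup or variable array");
         break;
       }
*/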
4532 /* Like pushcase but this case applies to all values between VALUE1 and
4533 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4534 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4535 starts at VALUE1 and ends at the highest value of the index type.
4536 If both are NULL, this case applies to all values.
4538 The return value is the same as that of pushcase but there is one
4539 additional error code: 4 means the specified range was empty. */
4542 pushcase_range (value1, value2, converter, label, duplicate)
4543 tree value1, value2;
4544 tree (*converter) PARAMS ((tree, tree));
4545 tree label;
4546 tree *duplicate;
4548 tree index_type;
4549 tree nominal_type;
4551 /* Fail if not inside a real case statement. */
4552 if (! (case_stack && case_stack->data.case_stmt.start))
4553 return 1;
4555 if (stack_block_stack
4556 && stack_block_stack->depth > case_stack->depth)
4557 return 5;
4559 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4560 nominal_type = case_stack->data.case_stmt.nominal_type;
4562 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4563 if (index_type == error_mark_node)
4564 return 0;
4566 check_seenlabel ();
4568 /* Convert VALUEs to the type in which the comparisons are nominally done
4569 and replace any unspecified value with the corresponding bound. */
4570 if (value1 == 0)
4571 value1 = TYPE_MIN_VALUE (index_type);
4572 if (value2 == 0)
4573 value2 = TYPE_MAX_VALUE (index_type);
4575 /* Fail if the range is empty. Do this before any conversion since
4576 we want to allow out-of-range empty ranges. */
4577 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4578 return 4;
4580 /* If the max was unbounded, use the max of the nominal_type we are
4581 converting to. Do this after the < check above to suppress false
4582 positives. */
4583 if (value2 == 0)
4584 value2 = TYPE_MAX_VALUE (nominal_type);
4586 value1 = (*converter) (nominal_type, value1);
4587 value2 = (*converter) (nominal_type, value2);
4589 /* Fail if these values are out of range. */
4590 if (TREE_CONSTANT_OVERFLOW (value1)
4591 || ! int_fits_type_p (value1, index_type))
4592 return 3;
4594 if (TREE_CONSTANT_OVERFLOW (value2)
4595 || ! int_fits_type_p (value2, index_type))
4596 return 3;
4598 return add_case_node (value1, value2, label, duplicate);
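/* Illustrative example, not part of the original source: GNU C case
   ranges are the construct this function serves; `case 1 ... 5:' below
   would reach pushcase_range with VALUE1 = 1 and VALUE2 = 5.

     switch (x)
       {
       case 1 ... 5: return 0;            -- one range, two bounds
       default:      return 1;
       }
*/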
4601 /* Do the actual insertion of a case label for pushcase and pushcase_range
4602 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4603 slowdown for large switch statements. */
4606 add_case_node (low, high, label, duplicate)
4607 tree low, high;
4608 tree label;
4609 tree *duplicate;
4611 struct case_node *p, **q, *r;
4613 /* If there's no HIGH value, then this is not a case range; it's
4614 just a simple case label. But that's just a degenerate case
4615 range. */
4616 if (!high)
4617 high = low;
4619 /* Handle default labels specially. */
4620 if (!high && !low)
4622 if (case_stack->data.case_stmt.default_label != 0)
4624 *duplicate = case_stack->data.case_stmt.default_label;
4625 return 2;
4627 case_stack->data.case_stmt.default_label = label;
4628 expand_label (label);
4629 return 0;
4632 q = &case_stack->data.case_stmt.case_list;
4633 p = *q;
4635 while ((r = *q))
4637 p = r;
4639 /* Keep going past elements distinctly greater than HIGH. */
4640 if (tree_int_cst_lt (high, p->low))
4641 q = &p->left;
4643 /* or distinctly less than LOW. */
4644 else if (tree_int_cst_lt (p->high, low))
4645 q = &p->right;
4647 else
4649 /* We have an overlap; this is an error. */
4650 *duplicate = p->code_label;
4651 return 2;
4655 /* Add this label to the chain, and succeed. */
4657 r = (struct case_node *) ggc_alloc (sizeof (struct case_node));
4658 r->low = low;
4660 /* If the bounds are equal, turn this into the one-value case. */
4661 if (tree_int_cst_equal (low, high))
4662 r->high = r->low;
4663 else
4664 r->high = high;
4666 r->code_label = label;
4667 expand_label (label);
4669 *q = r;
4670 r->parent = p;
4671 r->left = 0;
4672 r->right = 0;
4673 r->balance = 0;
4675 while (p)
4677 struct case_node *s;
4679 if (r == p->left)
4681 int b;
4683 if (! (b = p->balance))
4684 /* Growth propagation from left side. */
4685 p->balance = -1;
4686 else if (b < 0)
4688 if (r->balance < 0)
4690 /* R-Rotation */
4691 if ((p->left = s = r->right))
4692 s->parent = p;
4694 r->right = p;
4695 p->balance = 0;
4696 r->balance = 0;
4697 s = p->parent;
4698 p->parent = r;
4700 if ((r->parent = s))
4702 if (s->left == p)
4703 s->left = r;
4704 else
4705 s->right = r;
4707 else
4708 case_stack->data.case_stmt.case_list = r;
4710 else
4711 /* r->balance == +1 */
4713 /* LR-Rotation */
4715 int b2;
4716 struct case_node *t = r->right;
4718 if ((p->left = s = t->right))
4719 s->parent = p;
4721 t->right = p;
4722 if ((r->right = s = t->left))
4723 s->parent = r;
4725 t->left = r;
4726 b = t->balance;
4727 b2 = b < 0;
4728 p->balance = b2;
4729 b2 = -b2 - b;
4730 r->balance = b2;
4731 t->balance = 0;
4732 s = p->parent;
4733 p->parent = t;
4734 r->parent = t;
4736 if ((t->parent = s))
4738 if (s->left == p)
4739 s->left = t;
4740 else
4741 s->right = t;
4743 else
4744 case_stack->data.case_stmt.case_list = t;
4746 break;
4749 else
4751 /* p->balance == +1; growth of left side balances the node. */
4752 p->balance = 0;
4753 break;
4756 else
4757 /* r == p->right */
4759 int b;
4761 if (! (b = p->balance))
4762 /* Growth propagation from right side. */
4763 p->balance++;
4764 else if (b > 0)
4766 if (r->balance > 0)
4768 /* L-Rotation */
4770 if ((p->right = s = r->left))
4771 s->parent = p;
4773 r->left = p;
4774 p->balance = 0;
4775 r->balance = 0;
4776 s = p->parent;
4777 p->parent = r;
4778 if ((r->parent = s))
4780 if (s->left == p)
4781 s->left = r;
4782 else
4783 s->right = r;
4786 else
4787 case_stack->data.case_stmt.case_list = r;
4790 else
4791 /* r->balance == -1 */
4793 /* RL-Rotation */
4794 int b2;
4795 struct case_node *t = r->left;
4797 if ((p->right = s = t->left))
4798 s->parent = p;
4800 t->left = p;
4802 if ((r->left = s = t->right))
4803 s->parent = r;
4805 t->right = r;
4806 b = t->balance;
4807 b2 = b < 0;
4808 r->balance = b2;
4809 b2 = -b2 - b;
4810 p->balance = b2;
4811 t->balance = 0;
4812 s = p->parent;
4813 p->parent = t;
4814 r->parent = t;
4816 if ((t->parent = s))
4818 if (s->left == p)
4819 s->left = t;
4820 else
4821 s->right = t;
4824 else
4825 case_stack->data.case_stmt.case_list = t;
4827 break;
4829 else
4831 /* p->balance == -1; growth of right side balances the node. */
4832 p->balance = 0;
4833 break;
4837 r = p;
4838 p = p->parent;
4841 return 0;
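/* Illustrative sketch, not part of the original source: the R-rotation
   case above, drawn out.  P is left-heavy and its left child R is also
   left-heavy, so R is rotated up and both balances become 0.

          p                  r
         / \                / \
        r   C     ==>      A   p
       / \                    / \
      A   B                  B   C

   In the LR/RL cases the grandchild T (r->right or r->left) is rotated
   to the top instead, exactly as the pointer shuffling in the code
   does.  */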
4844 /* Returns the number of possible values of TYPE.
4845 Returns -1 if the number is unknown, variable, or if the number does not
4846 fit in a HOST_WIDE_INT.
4847 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4848 do not increase monotonically (there may be duplicates);
4849 to 1 if the values increase monotonically, but not always by 1;
4850 otherwise sets it to 0. */
4852 HOST_WIDE_INT
4853 all_cases_count (type, sparseness)
4854 tree type;
4855 int *sparseness;
4857 tree t;
4858 HOST_WIDE_INT count, minval, lastval;
4860 *sparseness = 0;
4862 switch (TREE_CODE (type))
4864 case BOOLEAN_TYPE:
4865 count = 2;
4866 break;
4868 case CHAR_TYPE:
4869 count = 1 << BITS_PER_UNIT;
4870 break;
4872 default:
4873 case INTEGER_TYPE:
4874 if (TYPE_MAX_VALUE (type) != 0
4875 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4876 TYPE_MIN_VALUE (type))))
4877 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4878 convert (type, integer_zero_node))))
4879 && host_integerp (t, 1))
4880 count = tree_low_cst (t, 1);
4881 else
4882 return -1;
4883 break;
4885 case ENUMERAL_TYPE:
4886 /* Don't waste time with enumeral types with huge values. */
4887 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4888 || TYPE_MAX_VALUE (type) == 0
4889 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4890 return -1;
4892 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4893 count = 0;
4895 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4897 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4899 if (*sparseness == 2 || thisval <= lastval)
4900 *sparseness = 2;
4901 else if (thisval != minval + count)
4902 *sparseness = 1;
4904 lastval = thisval;
4905 count++;
4909 return count;
4912 #define BITARRAY_TEST(ARRAY, INDEX) \
4913 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4914 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4915 #define BITARRAY_SET(ARRAY, INDEX) \
4916 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4917 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
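/* Illustrative sketch, not part of the original source: the macros
   above implement a plain bitset over a char array.  Assuming
   HOST_BITS_PER_CHAR is 8:

     unsigned char seen[2] = { 0, 0 };    -- room for 16 bits
     BITARRAY_SET (seen, 10);             -- sets bit 10 % 8 = 2 of byte
                                             10 / 8 = 1
     if (BITARRAY_TEST (seen, 10))        -- now nonzero
       ...
*/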
4919 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4920 with the case values we have seen, assuming the case expression
4921 has the given TYPE.
4922 SPARSENESS is as determined by all_cases_count.
4924 The time needed is proportional to COUNT, unless
4925 SPARSENESS is 2, in which case quadratic time is needed. */
4927 void
4928 mark_seen_cases (type, cases_seen, count, sparseness)
4929 tree type;
4930 unsigned char *cases_seen;
4931 HOST_WIDE_INT count;
4932 int sparseness;
4934 tree next_node_to_try = NULL_TREE;
4935 HOST_WIDE_INT next_node_offset = 0;
4937 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4938 tree val = make_node (INTEGER_CST);
4940 TREE_TYPE (val) = type;
4941 if (! root)
4942 /* Do nothing. */
4944 else if (sparseness == 2)
4946 tree t;
4947 unsigned HOST_WIDE_INT xlo;
4949 /* This less efficient loop is only needed to handle
4950 duplicate case values (multiple enum constants
4951 with the same value). */
4952 TREE_TYPE (val) = TREE_TYPE (root->low);
4953 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4954 t = TREE_CHAIN (t), xlo++)
4956 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4957 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4958 n = root;
4961 /* Keep going past elements distinctly greater than VAL. */
4962 if (tree_int_cst_lt (val, n->low))
4963 n = n->left;
4965 /* or distinctly less than VAL. */
4966 else if (tree_int_cst_lt (n->high, val))
4967 n = n->right;
4969 else
4971 /* We have found a matching range. */
4972 BITARRAY_SET (cases_seen, xlo);
4973 break;
4976 while (n);
4979 else
4981 if (root->left)
4982 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4984 for (n = root; n; n = n->right)
4986 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4987 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4988 while (! tree_int_cst_lt (n->high, val))
4990 /* Calculate (into xlo) the "offset" of the integer (val).
4991 The element with lowest value has offset 0, the next smallest
4992 element has offset 1, etc. */
4994 unsigned HOST_WIDE_INT xlo;
4995 HOST_WIDE_INT xhi;
4996 tree t;
4998 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5000 /* The TYPE_VALUES will be in increasing order, so
5001 start searching where we last ended. */
5002 t = next_node_to_try;
5003 xlo = next_node_offset;
5004 xhi = 0;
5005 for (;;)
5007 if (t == NULL_TREE)
5009 t = TYPE_VALUES (type);
5010 xlo = 0;
5012 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5014 next_node_to_try = TREE_CHAIN (t);
5015 next_node_offset = xlo + 1;
5016 break;
5018 xlo++;
5019 t = TREE_CHAIN (t);
5020 if (t == next_node_to_try)
5022 xlo = -1;
5023 break;
5027 else
5029 t = TYPE_MIN_VALUE (type);
5030 if (t)
5031 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5032 &xlo, &xhi);
5033 else
5034 xlo = xhi = 0;
5035 add_double (xlo, xhi,
5036 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5037 &xlo, &xhi);
5040 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5041 BITARRAY_SET (cases_seen, xlo);
5043 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5044 1, 0,
5045 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5051 /* Given a switch statement with an expression that is an enumeration
5052 type, warn if any of the enumeration type's literals are not
5053 covered by the case expressions of the switch. Also, warn if there
5054 are any extra switch cases that are *not* elements of the
5055 enumerated type.
5057 Historical note:
5059 At one stage this function would: ``If all enumeration literals
5060 were covered by the case expressions, turn one of the expressions
5061 into the default expression since it should not be possible to fall
5062 through such a switch.''
5064 That code has since been removed as: ``This optimization is
5065 disabled because it causes valid programs to fail. ANSI C does not
5066 guarantee that an expression with enum type will have a value that
5067 is the same as one of the enumeration literals.'' */
5069 void
5070 check_for_full_enumeration_handling (type)
5071 tree type;
5073 struct case_node *n;
5074 tree chain;
5076 /* True iff the selector type is a numbered set mode. */
5077 int sparseness = 0;
5079 /* The number of possible selector values. */
5080 HOST_WIDE_INT size;
5082 /* For each possible selector value, a one iff it has been matched
5083 by a case value alternative. */
5084 unsigned char *cases_seen;
5086 /* The allocated size of cases_seen, in chars. */
5087 HOST_WIDE_INT bytes_needed;
5089 size = all_cases_count (type, &sparseness);
5090 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5092 if (size > 0 && size < 600000
5093 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5094 this optimization if we don't have enough memory rather than
5095 aborting, as xmalloc would do. */
5096 && (cases_seen =
5097 (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
5099 HOST_WIDE_INT i;
5100 tree v = TYPE_VALUES (type);
5102 /* The time complexity of this code is normally O(N), where
5103 N is the number of members in the enumerated type.
5104 However, if type is an ENUMERAL_TYPE whose values do not
5105 increase monotonically, O(N*log(N)) time may be needed. */
5107 mark_seen_cases (type, cases_seen, size, sparseness);
5109 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5110 if (BITARRAY_TEST (cases_seen, i) == 0)
5111 warning ("enumeration value `%s' not handled in switch",
5112 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5114 free (cases_seen);
5117 /* Now we go the other way around; we warn if there are case
5118 expressions that don't correspond to enumerators. This can
5119 occur since C and C++ don't enforce type-checking of
5120 assignments to enumeration variables. */
5122 if (case_stack->data.case_stmt.case_list
5123 && case_stack->data.case_stmt.case_list->left)
5124 case_stack->data.case_stmt.case_list
5125 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5126 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5128 for (chain = TYPE_VALUES (type);
5129 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5130 chain = TREE_CHAIN (chain))
5133 if (!chain)
5135 if (TYPE_NAME (type) == 0)
5136 warning ("case value `%ld' not in enumerated type",
5137 (long) TREE_INT_CST_LOW (n->low));
5138 else
5139 warning ("case value `%ld' not in enumerated type `%s'",
5140 (long) TREE_INT_CST_LOW (n->low),
5141 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5142 == IDENTIFIER_NODE)
5143 ? TYPE_NAME (type)
5144 : DECL_NAME (TYPE_NAME (type))));
5146 if (!tree_int_cst_equal (n->low, n->high))
5148 for (chain = TYPE_VALUES (type);
5149 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5150 chain = TREE_CHAIN (chain))
5153 if (!chain)
5155 if (TYPE_NAME (type) == 0)
5156 warning ("case value `%ld' not in enumerated type",
5157 (long) TREE_INT_CST_LOW (n->high));
5158 else
5159 warning ("case value `%ld' not in enumerated type `%s'",
5160 (long) TREE_INT_CST_LOW (n->high),
5161 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5162 == IDENTIFIER_NODE)
5163 ? TYPE_NAME (type)
5164 : DECL_NAME (TYPE_NAME (type))));
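/* Illustrative example, not part of the original source: with -Wswitch
   and no default label, the switch below draws "enumeration value `B'
   not handled in switch" from the first loop above, and `case 7' draws
   the "not in enumerated type" warning from the second.

     enum e { A, B };
     int
     m (enum e x)
     {
       switch (x)
         {
         case A: return 0;
         case 7: return 1;                -- not an enumerator of `enum e'
         }
       return 2;
     }
*/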
5172 /* Terminate a case (Pascal) or switch (C) statement
5173 in which ORIG_INDEX is the expression to be tested.
5174 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
5175 type as given in the source before any compiler conversions.
5176 Generate the code to test it and jump to the right place. */
5178 void
5179 expand_end_case_type (orig_index, orig_type)
5180 tree orig_index, orig_type;
5182 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5183 rtx default_label = 0;
5184 struct case_node *n;
5185 unsigned int count;
5186 rtx index;
5187 rtx table_label;
5188 int ncases;
5189 rtx *labelvec;
5190 int i;
5191 rtx before_case, end;
5192 struct nesting *thiscase = case_stack;
5193 tree index_expr, index_type;
5194 int unsignedp;
5196 /* Don't crash due to previous errors. */
5197 if (thiscase == NULL)
5198 return;
5200 table_label = gen_label_rtx ();
5201 index_expr = thiscase->data.case_stmt.index_expr;
5202 index_type = TREE_TYPE (index_expr);
5203 unsignedp = TREE_UNSIGNED (index_type);
5204 if (orig_type == NULL)
5205 orig_type = TREE_TYPE (orig_index);
5207 do_pending_stack_adjust ();
5209 /* This might get a spurious warning in the presence of a syntax error;
5210 it could be fixed by moving the call to check_seenlabel after the
5211 check for error_mark_node, and copying the code of check_seenlabel that
5212 deals with case_stack->data.case_stmt.line_number_status /
5213 restore_line_number_status in front of the call to end_cleanup_deferral;
5214 however, this might miss some useful warnings in the presence of
5215 non-syntax errors. */
5216 check_seenlabel ();
5218 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5219 if (index_type != error_mark_node)
5221 /* If the switch expression was an enumerated type, check that
5222 exactly all enumeration literals are covered by the cases.
5223 The check is made when -Wswitch was specified and there is no
5224 default case, or when -Wswitch-enum was specified. */
5225 if (((warn_switch && !thiscase->data.case_stmt.default_label)
5226 || warn_switch_enum)
5227 && TREE_CODE (orig_type) == ENUMERAL_TYPE
5228 && TREE_CODE (index_expr) != INTEGER_CST)
5229 check_for_full_enumeration_handling (orig_type);
5231 if (warn_switch_default && !thiscase->data.case_stmt.default_label)
5232 warning ("switch missing default case");
5234 /* If we don't have a default-label, create one here,
5235 after the body of the switch. */
5236 if (thiscase->data.case_stmt.default_label == 0)
5238 thiscase->data.case_stmt.default_label
5239 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5240 expand_label (thiscase->data.case_stmt.default_label);
5242 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5244 before_case = get_last_insn ();
5246 if (thiscase->data.case_stmt.case_list
5247 && thiscase->data.case_stmt.case_list->left)
5248 thiscase->data.case_stmt.case_list
5249 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5251 /* Simplify the case-list before we count it. */
5252 group_case_nodes (thiscase->data.case_stmt.case_list);
5254 /* Get upper and lower bounds of case values.
5255 Also convert all the case values to the index expr's data type. */
5257 count = 0;
5258 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5260 /* Check low and high label values are integers. */
5261 if (TREE_CODE (n->low) != INTEGER_CST)
5262 abort ();
5263 if (TREE_CODE (n->high) != INTEGER_CST)
5264 abort ();
5266 n->low = convert (index_type, n->low);
5267 n->high = convert (index_type, n->high);
5269 /* Count the elements and track the largest and smallest
5270 of them (treating them as signed even if they are not). */
5271 if (count++ == 0)
5273 minval = n->low;
5274 maxval = n->high;
5276 else
5278 if (INT_CST_LT (n->low, minval))
5279 minval = n->low;
5280 if (INT_CST_LT (maxval, n->high))
5281 maxval = n->high;
5283 /* A range counts double, since it requires two compares. */
5284 if (! tree_int_cst_equal (n->low, n->high))
5285 count++;
5288 /* Compute span of values. */
5289 if (count != 0)
5290 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5292 end_cleanup_deferral ();
5294 if (count == 0)
5296 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5297 emit_queue ();
5298 emit_jump (default_label);
5301 /* If the range of values is much bigger than the number of values,
5302 make a sequence of conditional branches instead of a dispatch table.
5303 If the switch-index is a constant, do it this way
5304 because we can optimize it. */
5306 else if (count < case_values_threshold ()
5307 || compare_tree_int (range, 10 * count) > 0
5308 /* RANGE may be signed, and really large ranges will show up
5309 as negative numbers. */
5310 || compare_tree_int (range, 0) < 0
5311 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5312 || flag_pic
5313 #endif
5314 || TREE_CODE (index_expr) == INTEGER_CST
5315 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5316 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5318 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5320 /* If the index is a short or char for which we do not have
5321 an insn to handle comparisons directly, convert it to
5322 a full integer now, rather than letting each comparison
5323 generate the conversion. */
5325 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5326 && ! have_insn_for (COMPARE, GET_MODE (index)))
5328 enum machine_mode wider_mode;
5329 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5330 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5331 if (have_insn_for (COMPARE, wider_mode))
5333 index = convert_to_mode (wider_mode, index, unsignedp);
5334 break;
5338 emit_queue ();
5339 do_pending_stack_adjust ();
5341 index = protect_from_queue (index, 0);
5342 if (GET_CODE (index) == MEM)
5343 index = copy_to_reg (index);
5344 if (GET_CODE (index) == CONST_INT
5345 || TREE_CODE (index_expr) == INTEGER_CST)
5347 /* Make a tree node with the proper constant value
5348 if we don't already have one. */
5349 if (TREE_CODE (index_expr) != INTEGER_CST)
5351 index_expr
5352 = build_int_2 (INTVAL (index),
5353 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5354 index_expr = convert (index_type, index_expr);
5357 /* For constant index expressions we need only
5358 issue an unconditional branch to the appropriate
5359 target code. The job of removing any unreachable
5360 code is left to the optimization phase if the
5361 "-O" option is specified. */
5362 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5363 if (! tree_int_cst_lt (index_expr, n->low)
5364 && ! tree_int_cst_lt (n->high, index_expr))
5365 break;
5367 if (n)
5368 emit_jump (label_rtx (n->code_label));
5369 else
5370 emit_jump (default_label);
5372 else
5374 /* If the index expression is not constant we generate
5375 a binary decision tree to select the appropriate
5376 target code. This is done as follows:
5378 The list of cases is rearranged into a binary tree,
5379 nearly optimal assuming equal probability for each case.
5381 The tree is transformed into RTL, eliminating
5382 redundant test conditions at the same time.
5384 If program flow could reach the end of the
5385 decision tree an unconditional jump to the
5386 default code is emitted. */
5388 use_cost_table
5389 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
5390 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5391 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5392 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5393 default_label, index_type);
5394 emit_jump_if_reachable (default_label);
5397 else
5399 if (! try_casesi (index_type, index_expr, minval, range,
5400 table_label, default_label))
5402 index_type = thiscase->data.case_stmt.nominal_type;
5404 /* Index jumptables from zero for suitable values of
5405 minval to avoid a subtraction. */
5406 if (! optimize_size
5407 && compare_tree_int (minval, 0) > 0
5408 && compare_tree_int (minval, 3) < 0)
5410 minval = integer_zero_node;
5411 range = maxval;
5414 if (! try_tablejump (index_type, index_expr, minval, range,
5415 table_label, default_label))
5416 abort ();
5419 /* Get table of labels to jump to, in order of case index. */
5421 ncases = tree_low_cst (range, 0) + 1;
5422 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5423 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5425 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5427 /* Compute the low and high bounds relative to the minimum
5428 value since that should fit in a HOST_WIDE_INT while the
5429 actual values may not. */
5430 HOST_WIDE_INT i_low
5431 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5432 n->low, minval)), 1);
5433 HOST_WIDE_INT i_high
5434 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5435 n->high, minval)), 1);
5436 HOST_WIDE_INT i;
5438 for (i = i_low; i <= i_high; i ++)
5439 labelvec[i]
5440 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5443 /* Fill in the gaps with the default. */
5444 for (i = 0; i < ncases; i++)
5445 if (labelvec[i] == 0)
5446 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5448 /* Output the table */
5449 emit_label (table_label);
5451 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5452 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5453 gen_rtx_LABEL_REF (Pmode, table_label),
5454 gen_rtvec_v (ncases, labelvec),
5455 const0_rtx, const0_rtx));
5456 else
5457 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5458 gen_rtvec_v (ncases, labelvec)));
5460 /* If the case insn drops through the table,
5461 after the table we must jump to the default-label.
5462 Otherwise record no drop-through after the table. */
5463 #ifdef CASE_DROPS_THROUGH
5464 emit_jump (default_label);
5465 #else
5466 emit_barrier ();
5467 #endif
5470 before_case = NEXT_INSN (before_case);
5471 end = get_last_insn ();
5472 if (squeeze_notes (&before_case, &end))
5473 abort ();
5474 reorder_insns (before_case, end,
5475 thiscase->data.case_stmt.start);
5477 else
5478 end_cleanup_deferral ();
5480 if (thiscase->exit_label)
5481 emit_label (thiscase->exit_label);
5483 POPSTACK (case_stack);
5485 free_temp_slots ();
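/* Illustrative arithmetic, not part of the original source, for the
   branch-vs-table choice in expand_end_case_type above: under the 10x
   rule, 4 labels spanning a range of 100 take the compare-and-branch
   path (100 > 10 * 4, and a count of 4 is likely below
   case_values_threshold anyway), while 50 labels packed into a range of
   60 qualify for a dispatch table (60 <= 10 * 50).  */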
5488 /* Convert the tree NODE into a list linked by the right field, with the left
5489 field zeroed. RIGHT is used for recursion; it is a list to be placed
5490 rightmost in the resulting list. */
5492 static struct case_node *
5493 case_tree2list (node, right)
5494 struct case_node *node, *right;
5496 struct case_node *left;
5498 if (node->right)
5499 right = case_tree2list (node->right, right);
5501 node->right = right;
5502 if ((left = node->left))
5504 node->left = 0;
5505 return case_tree2list (left, node);
5508 return node;
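/* Illustrative example, not part of the original source: flattening
   the balanced tree

          2
         / \
        1   3

   with case_tree2list yields the chain 1 -> 2 -> 3 through the right
   fields, with every left field zeroed.  */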
5511 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5513 static void
5514 do_jump_if_equal (op1, op2, label, unsignedp)
5515 rtx op1, op2, label;
5516 int unsignedp;
5518 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5520 if (INTVAL (op1) == INTVAL (op2))
5521 emit_jump (label);
5523 else
5524 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5525 (GET_MODE (op1) == VOIDmode
5526 ? GET_MODE (op2) : GET_MODE (op1)),
5527 unsignedp, label);
5530 /* Not all case values are encountered equally. This function
5531 uses a heuristic to weight case labels, in cases where that
5532 looks like a reasonable thing to do.
5534 Right now, all we try to guess is text, and we establish the
5535 following weights:
5537 chars above space: 16
5538 digits: 16
5539 default: 12
5540 space, punct: 8
5541 tab: 4
5542 newline: 2
5543 other "\" chars: 1
5544 remaining chars: 0
5546 If we find any cases in the switch that are not either -1 or in the range
5547 of valid ASCII characters, or are control characters other than those
5548 commonly used with "\", don't treat this switch as scanning text.
5550 Return 1 if these nodes are suitable for cost estimation, otherwise
5551 return 0. */
static int
estimate_case_costs (node)
     case_node_ptr node;
{
  tree min_ascii = integer_minus_one_node;
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (! cost_table_initialized)
    {
      cost_table_initialized = 1;

      for (i = 0; i < 128; i++)
	{
	  if (ISALNUM (i))
	    COST_TABLE (i) = 16;
	  else if (ISPUNCT (i))
	    COST_TABLE (i) = 8;
	  else if (ISCNTRL (i))
	    COST_TABLE (i) = -1;
	}

      COST_TABLE (' ') = 8;
      COST_TABLE ('\t') = 4;
      COST_TABLE ('\0') = 4;
      COST_TABLE ('\n') = 2;
      COST_TABLE ('\f') = 1;
      COST_TABLE ('\v') = 1;
      COST_TABLE ('\b') = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
	return 0;

      for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
	   i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
	if (COST_TABLE (i) < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}
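
/* For example, estimate_case_costs accepts a switch whose cases are
   'a', '0' and '\n', weighting them 16, 16 and 2 respectively, while a
   switch containing the case value 200 (outside -1 .. 127) makes it
   return 0 and the cost table go unused.  */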
/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      rtx lb2;
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
	     /* Do they jump to the same place?  */
	     && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
		 || (lb != 0 && lb2 != 0
		     && simplejump_p (lb)
		     && simplejump_p (lb2)
		     && rtx_equal_p (SET_SRC (PATTERN (lb)),
				     SET_SRC (PATTERN (lb2)))))
	     /* Are their ranges consecutive?  */
	     && tree_int_cst_equal (np->low,
				    fold (build (PLUS_EXPR,
						 TREE_TYPE (node->high),
						 node->high,
						 integer_one_node)))
	     /* An overflow is not consecutive.  */
	     && tree_int_cst_lt (node->high,
				 fold (build (PLUS_EXPR,
					      TREE_TYPE (node->high),
					      node->high,
					      integer_one_node))))
	node->high = np->high;

      /* NP is the first node after NODE which can't be grouped with it.
	 Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}
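
/* The overflow test in group_case_nodes matters when NODE->high is the
   largest value of its type: NODE->high + 1 then wraps around, so e.g.
   for an unsigned char index, cases 255 and 0 that share a label are
   not merged into the bogus range 255..0.  */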
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on this branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
	    }

	  if (use_cost_table)
	    cost += COST_TABLE (TREE_INT_CST_LOW (np->low));

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total
		 cost.  Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
		  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total
		 cost, where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
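
/* For illustration: balancing the five single-value nodes 1 -> 2 -> 3
   -> 4 -> 5 (with no cost table) picks the third node as the pivot,
   leaving 1 -> 2 on its left and 4 -> 5 on its right, each of which is
   then balanced recursively.  */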
/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */

static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}
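
/* For example, node_has_low_bound holds for a node covering 51 .. 60
   when some ancestor's range ends at 50: by the emission order above,
   reaching the node already implies INDEX > 50, so no explicit
   lower-bound test is needed.  */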
/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If there is no upper bound, obviously no test is needed.  */

  if (TYPE_MAX_VALUE (index_type) == NULL)
    return 1;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
			       node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}
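
/* Symmetrically, node_has_high_bound holds for a node covering
   51 .. 60 when some ancestor's range starts at 61.  */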
/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
	  && node_has_high_bound (node, index_type));
}
/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}
/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out-of-bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */
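
/* For illustration: for the cases 1, 2 and 3 balanced with 2 at the
   root, the code emitted here looks roughly like

	if (index == 2) goto L2;
	if (index > 2) goto T;
	if (index == 1) goto L1;
	goto DEFAULT;
     T: if (index == 3) goto L3;

   where L1, L2 and L3 stand for the case labels and T for the internal
   test label; the final fall through into the default handling is left
   to the caller.  */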
static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  enum machine_mode mode = GET_MODE (index);
  enum machine_mode imode = TYPE_MODE (index_type);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (index,
			convert_modes (mode, imode,
				       expand_expr (node->low, NULL_RTX,
						    VOIDmode, 0),
				       unsignedp),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->right->code_label));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->left->code_label));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      emit_jump_if_reachable (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}
      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue a conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if it would avoid only one
	     right child; it costs too much space to save so little time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_expr (node->high, NULL_RTX,
							 VOIDmode, 0),
					    unsignedp),
					   LT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (index,
			      convert_modes
			      (mode, imode,
			       expand_expr (node->right->low, NULL_RTX,
					    VOIDmode, 0),
			       unsignedp),
			      label_rtx (node->right->code_label), unsignedp);
	}
      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_expr (node->high, NULL_RTX,
							 VOIDmode, 0),
					    unsignedp),
					   GT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers greater than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (index,
			      convert_modes
			      (mode, imode,
			       expand_expr (node->left->low, NULL_RTX,
					    VOIDmode, 0),
			       unsignedp),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_cmp_and_jump_insns (index,
				     convert_modes
				     (mode, imode,
				      expand_expr (node->high, NULL_RTX,
						   VOIDmode, 0),
				      unsignedp),
				     GT, NULL_RTX, mode, unsignedp,
				     label_rtx (node->right->code_label));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump_if_reachable (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}
      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->low, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
				    unsignedp),
				   LE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}
      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}
      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  At most one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */
	  int high_bound = node_has_high_bound (node, index_type);
	  int low_bound = node_has_low_bound (node, index_type);

	  if (!high_bound && low_bound)
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  else if (!low_bound && high_bound)
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->low, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }
	  else if (!low_bound && !high_bound)
	    {
	      /* Widen LOW and HIGH to the same width as INDEX.  */
	      tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
	      tree low = build1 (CONVERT_EXPR, type, node->low);
	      tree high = build1 (CONVERT_EXPR, type, node->high);
	      rtx low_rtx, new_index, new_bound;

	      /* Instead of doing two branches, emit one unsigned branch for
		 (index-low) > (high-low).  */
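	      /* For example, for the range 3 .. 7 this emits the single
		 unsigned test (index - 3) > 4, which is true exactly when
		 index < 3 or index > 7, since the subtraction wraps
		 around for index < 3.  */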
	      low_rtx = expand_expr (low, NULL_RTX, mode, 0);
	      new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
					       NULL_RTX, unsignedp,
					       OPTAB_WIDEN);
	      new_bound = expand_expr (fold (build (MINUS_EXPR, type,
						    high, low)),
				       NULL_RTX, mode, 0);

	      emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
				       mode, 1, default_label);
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}
#include "gt-stmt.h"