/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
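
/* For illustration only: a minimal sketch, not part of this file, of the
   call sequence a hypothetical front end might use to expand

       if (cond) then_stmt; else else_stmt;

   The expand_* names are the real entry points in this file; the
   expand_stmt calls stand in for whatever the front end does to expand
   the embedded statements.  */
#if 0
  expand_start_cond (cond, /*exitflag=*/0); /* emit the test of COND */
  expand_stmt (then_stmt);                  /* expand the then-clause */
  expand_start_else ();                     /* jump over the else-clause */
  expand_stmt (else_stmt);                  /* expand the else-clause */
  expand_end_cond ();                       /* emit the ending label */
#endif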
#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
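
/* For illustration only, a hedged sketch of the two strategies on
   hypothetical source fragments (labels invented here).  A dense switch

       switch (x) { case 0: ... case 1: ... case 2: ... case 3: ... }

   is emitted as a bounds check plus an indexed jump through a branch
   table.  A sparse one

       switch (x) { case 1: ... case 100: ... case 10000: ... }

   becomes a balanced tree of compare-and-jump insns, roughly

       if (x == 100) goto L100;
       else if (x < 100) { if (x == 1) goto L1; }
       else { if (x == 10000) goto L10000; }
       goto Ldefault;  */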
struct case_node GTY(())
{
  struct case_node *left;       /* Left son in binary tree */
  struct case_node *right;      /* Right son in binary tree; also node chain */
  struct case_node *parent;     /* Parent of node in binary tree */
  tree low;                     /* Lowest index value for this label */
  tree high;                    /* Highest index value for this label */
  tree code_label;              /* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]

/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
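
/* For illustration only: a hedged sketch of these stacks while expanding

       while (c)        <- expand_start_loop pushes a LOOP_NESTING entry
         {              <- expand_start_bindings pushes a BLOCK_NESTING entry
           if (d)       <- expand_start_cond pushes a COND_NESTING entry
             break;     <- jumps to the exit_label of the innermost
         }                 visible construct, here the loop's

   At the `break', nesting_stack runs cond -> block -> loop via the `all'
   fields, while cond_stack, block_stack and loop_stack each hold only
   their own kind of entry.  */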
struct nesting GTY(())
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  enum nesting_desc {
    COND_NESTING,
    LOOP_NESTING,
    BLOCK_NESTING,
    CASE_NESTING
  } desc;
  union nesting_u
    {
      /* For conds (if-then and if-then-else statements).  */
      struct nesting_cond
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } GTY ((tag ("COND_NESTING"))) cond;
      /* For loops.  */
      struct nesting_loop
        {
          /* Label at the top of the loop; place to loop back to.  */
          rtx start_label;
          /* Label at the end of the whole construct.  */
          rtx end_label;
          /* Label before a jump that branches to the end of the whole
             construct.  This is where destructors go if any.  */
          rtx alt_end_label;
          /* Label for `continue' statement to jump to;
             this is in front of the stepper of the loop.  */
          rtx continue_label;
        } GTY ((tag ("LOOP_NESTING"))) loop;
      /* For variable binding contours.  */
      struct nesting_block
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0,
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Number of function calls seen, as of start of this block.  */
          int n_function_calls;
          /* Nonzero if this is associated with an EH region.  */
          int exception_region;
          /* The saved target_temp_slot_level from our outer block.
             We may reset target_temp_slot_level to be the level of
             this block, if that is done, target_temp_slot_level
             reverts to the saved target_temp_slot_level at the very
             end of the block.  */
          int block_target_temp_slot_level;
          /* True if we are currently emitting insns in an area of
             output code that is controlled by a conditional
             expression.  This is used by the cleanup handling code to
             generate conditional cleanup actions.  */
          int conditional_code;
          /* A place to move the start of the exception region for any
             of the conditional cleanups, must be at the end or after
             the start of the last unconditional cleanup, and before any
             conditional branch points.  */
          rtx last_unconditional_cleanup;
        } GTY ((tag ("BLOCK_NESTING"))) block;
      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */
      struct nesting_case
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* A list of case labels; it is first built as an AVL tree.
             During expand_end_case, this is converted to a list, and may be
             rearranged into a nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Name of this kind of statement, for warnings.  */
          const char *printname;
          /* Used to save no_line_numbers till we see the first case label.
             We set this to -1 when we see the first case label in this
             case statement.  */
          int line_number_status;
        } GTY ((tag ("CASE_NESTING"))) case_stmt;
    } GTY ((desc ("%1.desc"))) data;
};

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
 (struct nesting *) ggc_alloc (sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)                                 \
do { struct nesting *target = STACK;                    \
     struct nesting *this;                              \
     do { this = nesting_stack;                         \
          if (loop_stack == this)                       \
            loop_stack = loop_stack->next;              \
          if (cond_stack == this)                       \
            cond_stack = cond_stack->next;              \
          if (block_stack == this)                      \
            block_stack = block_stack->next;            \
          if (stack_block_stack == this)                \
            stack_block_stack = stack_block_stack->next; \
          if (case_stack == this)                       \
            case_stack = case_stack->next;              \
          nesting_depth = nesting_stack->depth - 1;     \
          nesting_stack = this->all; }                  \
     while (this != target); } while (0)
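
/* For illustration only: POPSTACK is how the expand_end_* functions
   unwind.  A hedged sketch of the pattern, roughly as a loop uses it:  */
#if 0
  struct nesting *thisloop = loop_stack; /* pushed by expand_start_loop */
  /* ... emit the loop's closing insns ... */
  POPSTACK (loop_stack);                 /* pops THISLOOP, and anything still
                                            nested inside it, off every
                                            per-construct stack */
#endif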
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
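
/* For illustration only, a hypothetical source fragment that needs such
   a fixup:

       goto out;              <- stack level at `out' not yet known
       { int v[n]; ... }      <- variable-sized array; this block resets
       out: ;                    the stack pointer on exit

   The goto is emitted as a plain jump plus a fixup record; when the
   brace-block is exited, and once the label has finally been seen, the
   fixup machinery below inserts the stack-restore insns just before the
   jump.  */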
struct goto_fixup GTY(())
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain GTY(())
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};

struct stmt_status GTY(())
{
  /* Chain of all pending binding contours.  */
  struct nesting * x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting * x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting * x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting * x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting * x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting * x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Filename and line number of last line-number note,
     whether we actually emitted it or not.  */
  const char *x_emit_filename;
  int x_emit_lineno;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_filename (cfun->stmt->x_emit_filename)
#define emit_lineno (cfun->stmt->x_emit_lineno)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;
static int n_occurrences                PARAMS ((int, const char *));
static bool parse_input_constraint      PARAMS ((const char **, int, int, int,
                                                 int, const char * const *,
                                                 bool *, bool *));
static void expand_goto_internal        PARAMS ((tree, rtx, rtx));
static int expand_fixup                 PARAMS ((tree, rtx, rtx));
static rtx expand_nl_handler_label      PARAMS ((rtx, rtx));
static void expand_nl_goto_receiver     PARAMS ((void));
static void expand_nl_goto_receivers    PARAMS ((struct nesting *));
static void fixup_gotos                 PARAMS ((struct nesting *, rtx, tree,
                                                 rtx, int));
static bool check_operand_nalternatives PARAMS ((tree, tree));
static bool check_unique_operand_names  PARAMS ((tree, tree));
static tree resolve_operand_names       PARAMS ((tree, tree, tree,
                                                 const char **));
static char *resolve_operand_name_1     PARAMS ((char *, tree, tree));
static void expand_null_return_1        PARAMS ((rtx));
static enum br_predictor return_prediction PARAMS ((rtx));
static void expand_value_return         PARAMS ((rtx));
static int tail_recursion_args          PARAMS ((tree, tree));
static void expand_cleanups             PARAMS ((tree, tree, int, int));
static void check_seenlabel             PARAMS ((void));
static void do_jump_if_equal            PARAMS ((rtx, rtx, rtx, int));
static int estimate_case_costs          PARAMS ((case_node_ptr));
static void group_case_nodes            PARAMS ((case_node_ptr));
static void balance_case_nodes          PARAMS ((case_node_ptr *,
                                                 case_node_ptr));
static int node_has_low_bound           PARAMS ((case_node_ptr, tree));
static int node_has_high_bound          PARAMS ((case_node_ptr, tree));
static int node_is_bounded              PARAMS ((case_node_ptr, tree));
static void emit_jump_if_reachable      PARAMS ((rtx));
static void emit_case_nodes             PARAMS ((rtx, case_node_ptr, rtx, tree));
static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt_for_function ()
{
  cfun->stmt = ((struct stmt_status *)ggc_alloc (sizeof (struct stmt_status)));

  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  current_block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  clear_last_expr ();
}

/* Return nonzero if anything is pushed on the loop, condition, or case
   stack.  */
int
in_control_zone_p ()
{
  return cond_stack || loop_stack || case_stack;
}

/* Record the current file and line.  Called from emit_line_note.  */
void
set_file_and_line_for_stmt (file, line)
     const char *file;
     int line;
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    {
      emit_filename = file;
      emit_lineno = line;
    }
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
          || (GET_CODE (last_insn) == NOTE
              && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}

/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (!DECL_RTL_SET_P (label))
    SET_DECL_RTL (label, gen_label_rtx ());

  return DECL_RTL (label);
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (x) != Pmode)
    x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}

/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
                       &nonlocal_goto_stack_level,
                       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */
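
/* For illustration only: the nonlocal-goto case handled below arises
   from GNU C nested functions, following the pattern the GCC manual
   documents (sketch, not part of this file):  */
#if 0
  void outer (int *array, int size)
  {
    __label__ failure;                 /* label visible to nested fns */
    int access (int i)
      {
        if (i > size)
          goto failure;                /* nonlocal goto to OUTER's label */
        return array[i];
      }
    /* ... use access () ... */
  failure:;
  }
#endif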
void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area, insn;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
           link = TREE_CHAIN (link))
        handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
         case the virtual stack vars register gets instantiated differently
         in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
         which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
                                               virtual_stack_vars_rtx,
                                               static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
        save_area = replace_rtx (copy_rtx (save_area),
                                 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
                                      save_area, label_ref));
      else
#endif
        {
          /* Restore frame pointer for containing function.
             This sets the actual hard register used for the frame pointer
             to the location of the function's incoming static chain info.
             The non-local goto handler will then adjust it to contain the
             proper value and reload the argument pointer, if needed.  */
          emit_move_insn (hard_frame_pointer_rtx, static_chain);
          emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

          /* USE of hard_frame_pointer_rtx added for consistency;
             not clear if really needed.  */
          emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
          emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
          emit_indirect_jump (handler_slot);
        }

      /* Search backwards to the jump insn and mark it as a
         non-local goto.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
        {
          if (GET_CODE (insn) == JUMP_INSN)
            {
              REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
                                                  const0_rtx, REG_NOTES (insn));
              break;
            }
          else if (GET_CODE (insn) == CALL_INSN)
            break;
        }
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing insn-uids.)
         Then restore the outermost stack level within that block,
         and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
        {
          if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
            break;
          if (block->data.block.stack_level != 0)
            stack_level = block->data.block.stack_level;
          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
            }
        }

      if (stack_level)
        {
          /* Ensure stack adjust isn't done by emit_jump, as this
             would clobber the stack pointer.  This one should be
             deleted as dead by flow.  */
          clear_pending_stack_adjust ();
          do_pending_stack_adjust ();

          /* Don't do this adjust if it's to the end label and this function
             is to return with a depressed stack pointer.  */
          if (label == return_label
              && (((TREE_CODE (TREE_TYPE (current_function_decl))
                    == FUNCTION_TYPE)
                   && (TYPE_RETURNS_STACK_DEPRESSED
                       (TREE_TYPE (current_function_decl))))))
            ;
          else
            emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
        }

      if (body != 0 && DECL_TOO_LATE (body))
        error ("jump to `%s' invalidly jumps into binding contour",
               IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
         of at least one goto that has no fixup.  */
      if (body != 0)
        TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}

/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
          || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
           && (rtl_label == loop_stack->data.loop.start_label
               || rtl_label == loop_stack->data.loop.end_label
               || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
         If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
        next_block = next_block->all;

      if (next_block)
        return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
         which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
        if (block == next_block)
          next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
        || block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
        = (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
         after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
         doing the stack adjust here isn't useful.  Doing it with our
         settings at that location isn't useful either.  Let's hope
         someone does it!  */
      if (last_insn == 0)
        do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
         NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
         this point.  The notes will encapsulate any and all fixup
         code which we might later insert at this point in the insn
         stream.  Also, the BLOCK node will be the parent (i.e. the
         `SUPERBLOCK') of any other BLOCK nodes which we might create
         later on when we are expanding the fixup code.

         Note that optimization passes (including expand_end_loop)
         might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
         as a placeholder.  */

      {
        rtx original_before_jump
          = last_insn ? last_insn : get_last_insn ();
        rtx start;
        rtx end;
        tree block;

        block = make_node (BLOCK);
        TREE_USED (block) = 1;

        if (!cfun->x_whole_function_mode_p)
          (*lang_hooks.decls.insert_block) (block);
        else
          {
            BLOCK_CHAIN (block)
              = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
            BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
              = block;
          }

        start_sequence ();
        start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
        if (cfun->x_whole_function_mode_p)
          NOTE_BLOCK (start) = block;
        fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
        end = emit_note (NULL, NOTE_INSN_BLOCK_END);
        if (cfun->x_whole_function_mode_p)
          NOTE_BLOCK (end) = block;
        fixup->context = block;
        end_sequence ();
        emit_insn_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
        = ((block->data.block.outer_cleanups
            || block->data.block.cleanups)
           ? tree_cons (NULL_TREE, block->data.block.cleanups,
                        block->data.block.outer_cleanups)
           : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.
   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev != 0)
            prev->next = f->next;
        }

      /* Has this fixup's target label been defined?
         If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
        {
          rtx cleanup_insns;

          /* If this fixup jumped into this contour from before the beginning
             of this contour, report an error.   This code used to use
             the first non-label insn after f->target_rtl, but that's
             wrong since such can be added, by things like put_var_into_stack
             and have INSN_UIDs that are out of the range of the block.  */
          /* ??? Bug: this does not detect jumping in through intermediate
             blocks that have stack levels or cleanups.
             It detects only a problem with the innermost block
             around the label.  */
          if (f->target != 0
              && (dont_jump_in || stack_level || cleanup_list)
              && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
              && INSN_UID (first_insn) > INSN_UID (f->before_jump)
              && ! DECL_ERROR_ISSUED (f->target))
            {
              error_with_decl (f->target,
                               "label `%s' used before containing binding contour");
              /* Prevent multiple errors for one label.  */
              DECL_ERROR_ISSUED (f->target) = 1;
            }

          /* We will expand the cleanups into a sequence of their own and
             then later on we will attach this new sequence to the insn
             stream just ahead of the actual jump insn.  */

          start_sequence ();

          /* Temporarily restore the lexical context where we will
             logically be inserting the fixup code.  We do this for the
             sake of getting the debugging information right.  */

          (*lang_hooks.decls.pushlevel) (0);
          (*lang_hooks.decls.set_block) (f->context);

          /* Expand the cleanups for blocks this jump exits.  */
          if (f->cleanup_list_list)
            {
              tree lists;
              for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
                /* Marked elements correspond to blocks that have been closed.
                   Do their cleanups.  */
                if (TREE_ADDRESSABLE (lists)
                    && TREE_VALUE (lists) != 0)
                  {
                    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
                    /* Pop any pushes done in the cleanups,
                       in case function is about to return.  */
                    do_pending_stack_adjust ();
                  }
            }

          /* Restore stack level for the biggest contour that this
             jump jumps out of.  */
          if (f->stack_level
              && ! (f->target_rtl == return_label
                    && ((TREE_CODE (TREE_TYPE (current_function_decl))
                         == FUNCTION_TYPE)
                        && (TYPE_RETURNS_STACK_DEPRESSED
                            (TREE_TYPE (current_function_decl))))))
            emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

          /* Finish up the sequence containing the insns which implement the
             necessary cleanups, and then attach that whole sequence to the
             insn stream just ahead of the actual jump insn.  Attaching it
             at that point ensures that any cleanups which are in fact
             implicit C++ object destructions (which must be executed upon
             leaving the block) appear (to the debugger) to be taking place
             in an area of the generated code where the object(s) being
             destructed are still "in scope".  */

          cleanup_insns = get_insns ();
          (*lang_hooks.decls.poplevel) (1, 0, 0);

          end_sequence ();
          emit_insn_after (cleanup_insns, f->before_jump);

          f->before_jump = 0;
        }
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
        && PREV_INSN (f->target_rtl) == 0
        /* Label has still not appeared.  If we are exiting a block with
           a stack level to restore, that started before the fixup,
           mark this stack level as needing restoration
           when the fixup is later finalized.  */
        && thisblock != 0
        /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
           means the label is undefined.  That's erroneous, but possible.  */
        && (thisblock->data.block.block_start_count
            <= f->block_start_count))
      {
        tree lists = f->cleanup_list_list;
        rtx cleanup_insns;

        for (; lists; lists = TREE_CHAIN (lists))
          /* If the following elt. corresponds to our containing block
             then the elt. must be for this block.  */
          if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
            {
              start_sequence ();
              (*lang_hooks.decls.pushlevel) (0);
              (*lang_hooks.decls.set_block) (f->context);
              expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
              cleanup_insns = get_insns ();
              (*lang_hooks.decls.poplevel) (1, 0, 0);
              end_sequence ();
              if (cleanup_insns != 0)
                f->before_jump
                  = emit_insn_after (cleanup_insns, f->before_jump);

              f->cleanup_list_list = TREE_CHAIN (lists);
            }

        if (stack_level)
          f->stack_level = stack_level;
      }
}

/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}

/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */
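
/* For illustration only: a plain

       asm ("nop");

   reaches this function with BODY the STRING_CST "nop" (possibly behind
   an ADDR_EXPR) and becomes a single ASM_INPUT rtx in the insn stream;
   asms with operands go through expand_asm_operands below instead.  */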
void
expand_asm (body)
     tree body;
{
  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
                                TREE_STRING_POINTER (body)));
  clear_last_expr ();
}
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */
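
/* For illustration only: given the constraint "+r" for output operand 0,
   this function sets *IS_INOUT, rewrites *CONSTRAINT_P to "=r", and
   reports *ALLOWS_REG true and *ALLOWS_MEM false; a trailing-`=' form
   such as "r=" is likewise rotated to "=r", with a warning.  */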
bool
parse_output_constraint (constraint_p, operand_num, ninputs, noutputs,
                         allows_mem, allows_reg, is_inout)
     const char **constraint_p;
     int operand_num;
     int ninputs;
     int noutputs;
     bool *allows_mem;
     bool *allows_reg;
     bool *is_inout;
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks `='");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
        warning ("output constraint `%c' for operand %d is not at the beginning",
                 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
         it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; ++p)
    switch (*p)
      {
      case '+':
      case '=':
        error ("operand constraint contains incorrectly positioned '+' or '='");
        return false;

      case '%':
        if (operand_num + 1 == ninputs + noutputs)
          {
            error ("`%%' constraint used with last operand");
            return false;
          }
        break;

      case 'V':  case 'm':  case 'o':
        *allows_mem = true;
        break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
        break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
        error ("matching constraint not valid in output operand");
        return false;

      case '<':  case '>':
        /* ??? Before flow, auto inc/dec insns are not supposed to exist,
           excepting those that expand_call created.  So match memory
           and hope.  */
        *allows_mem = true;
        break;

      case 'g':  case 'X':
        *allows_reg = true;
        *allows_mem = true;
        break;

      case 'p': case 'r':
        *allows_reg = true;
        break;

      default:
        if (!ISALPHA (*p))
          break;
        if (REG_CLASS_FROM_LETTER (*p) != NO_REGS)
          *allows_reg = true;
#ifdef EXTRA_CONSTRAINT
        else
          {
            /* Otherwise we can't assume anything about the nature of
               the constraint except that it isn't purely registers.
               Treat it like "g" and hope for the best.  */
            *allows_reg = true;
            *allows_mem = true;
          }
#endif
        break;
      }

  return true;
}

/* Similar, but for input constraints.  */

static bool
parse_input_constraint (constraint_p, input_num, ninputs, noutputs, ninout,
                        constraints, allows_mem, allows_reg)
     const char **constraint_p;
     int input_num;
     int ninputs;
     int noutputs;
     int ninout;
     const char * const * constraints;
     bool *allows_mem;
     bool *allows_reg;
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j++)
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
        if (constraint == orig_constraint)
          {
            error ("input operand constraint contains `%c'", constraint[j]);
            return false;
          }
        break;

      case '%':
        if (constraint == orig_constraint
            && input_num + 1 == ninputs - ninout)
          {
            error ("`%%' constraint used with last operand");
            return false;
          }
        break;

      case 'V':  case 'm':  case 'o':
        *allows_mem = true;
        break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
        break;

        /* Whether or not a numeric constraint allows a register is
           decided by the matching constraint, and so there is no need
           to do anything special with them.  We must handle them in
           the default case, so that we don't unnecessarily force
           operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
        {
          char *end;
          unsigned long match;

          match = strtoul (constraint + j, &end, 10);
          if (match >= (unsigned long) noutputs)
            {
              error ("matching constraint references invalid operand number");
              return false;
            }

          /* Try and find the real constraint for this dup.  Only do this
             if the matching constraint is the only alternative.  */
          if (*end == '\0'
              && (j == 0 || (j == 1 && constraint[0] == '%')))
            {
              constraint = constraints[match];
              *constraint_p = constraint;
              c_len = strlen (constraint);
              j = 0;
              break;
            }
          else
            j = end - constraint;
        }
        /* Fall through.  */

      case 'p':  case 'r':
        *allows_reg = true;
        break;

      case 'g':  case 'X':
        *allows_reg = true;
        *allows_mem = true;
        break;

      default:
        if (! ISALPHA (constraint[j]))
          {
            error ("invalid punctuation `%c' in constraint", constraint[j]);
            return false;
          }
        if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
          *allows_reg = true;
#ifdef EXTRA_CONSTRAINT
        else
          {
            /* Otherwise we can't assume anything about the nature of
               the constraint except that it isn't purely registers.
               Treat it like "g" and hope for the best.  */
            *allows_reg = true;
            *allows_mem = true;
          }
#endif
        break;
      }

  return true;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
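
/* For illustration only, a hedged sketch of what reaches this function
   for a typical extended asm (variable names invented here):

       asm volatile ("addl %1, %0" : "+r" (x) : "g" (y) : "cc");

   STRING is the template "addl %1, %0"; OUTPUTS holds the "+r" lvalue,
   which the passes below split into an output plus a matching input;
   INPUTS holds the "g" operand; CLOBBERS names "cc"; and VOL is nonzero
   because of the `volatile' keyword.  */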
1413 void
1414 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1415 tree string, outputs, inputs, clobbers;
1416 int vol;
1417 const char *filename;
1418 int line;
1420 rtvec argvec, constraintvec;
1421 rtx body;
1422 int ninputs = list_length (inputs);
1423 int noutputs = list_length (outputs);
1424 int ninout;
1425 int nclobbers;
1426 tree tail;
1427 int i;
1428 /* Vector of RTX's of evaluated output operands. */
1429 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1430 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1431 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1432 enum machine_mode *inout_mode
1433 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1434 const char **constraints
1435 = (const char **) alloca ((noutputs + ninputs) * sizeof (const char *));
1436 /* The insn we have emitted. */
1437 rtx insn;
1438 int old_generating_concat_p = generating_concat_p;
1440 /* An ASM with no outputs needs to be treated as volatile, for now. */
1441 if (noutputs == 0)
1442 vol = 1;
1444 if (! check_operand_nalternatives (outputs, inputs))
1445 return;
1447 if (! check_unique_operand_names (outputs, inputs))
1448 return;
1450 string = resolve_operand_names (string, outputs, inputs, constraints);
1452 #ifdef MD_ASM_CLOBBERS
1453 /* Sometimes we wish to automatically clobber registers across an asm.
1454 Case in point is when the i386 backend moved from cc0 to a hard reg --
1455 maintaining source-level compatibility means automatically clobbering
1456 the flags register. */
1457 MD_ASM_CLOBBERS (clobbers);
1458 #endif
1460 /* Count the number of meaningful clobbered registers, ignoring what
1461 we would ignore later. */
1462 nclobbers = 0;
1463 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1465 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1467 i = decode_reg_name (regname);
1468 if (i >= 0 || i == -4)
1469 ++nclobbers;
1470 else if (i == -2)
1471 error ("unknown register name `%s' in `asm'", regname);
1474 clear_last_expr ();
1476 /* First pass over inputs and outputs checks validity and sets
1477 mark_addressable if needed. */
1479 ninout = 0;
1480 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1482 tree val = TREE_VALUE (tail);
1483 tree type = TREE_TYPE (val);
1484 const char *constraint;
1485 bool is_inout;
1486 bool allows_reg;
1487 bool allows_mem;
1489 /* If there's an erroneous arg, emit no insn. */
1490 if (type == error_mark_node)
1491 return;
1493 /* Try to parse the output constraint. If that fails, there's
1494 no point in going further. */
1495 constraint = constraints[i];
1496 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1497 &allows_mem, &allows_reg, &is_inout))
1498 return;
1500 if (! allows_reg
1501 && (allows_mem
1502 || is_inout
1503 || (DECL_P (val)
1504 && GET_CODE (DECL_RTL (val)) == REG
1505 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
1506 (*lang_hooks.mark_addressable) (val);
1508 if (is_inout)
1509 ninout++;
1512 ninputs += ninout;
1513 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1515 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1516 return;
1519 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1521 bool allows_reg, allows_mem;
1522 const char *constraint;
1524 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1525 would get VOIDmode and that could cause a crash in reload. */
1526 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1527 return;
1529 constraint = constraints[i + noutputs];
1530 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1531 constraints, &allows_mem, &allows_reg))
1532 return;
1534 if (! allows_reg && allows_mem)
1535 (*lang_hooks.mark_addressable) (TREE_VALUE (tail));
1538 /* Second pass evaluates arguments. */
1540 ninout = 0;
1541 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1543 tree val = TREE_VALUE (tail);
1544 tree type = TREE_TYPE (val);
1545 bool is_inout;
1546 bool allows_reg;
1547 bool allows_mem;
1549 if (!parse_output_constraint (&constraints[i], i, ninputs,
1550 noutputs, &allows_mem, &allows_reg,
1551 &is_inout))
1552 abort ();
1554 /* If an output operand is not a decl or indirect ref and our constraint
1555 allows a register, make a temporary to act as an intermediate.
1556 Make the asm insn write into that, then our caller will copy it to
1557 the real output operand. Likewise for promoted variables. */
1559 generating_concat_p = 0;
1561 real_output_rtx[i] = NULL_RTX;
1562 if ((TREE_CODE (val) == INDIRECT_REF
1563 && allows_mem)
1564 || (DECL_P (val)
1565 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1566 && ! (GET_CODE (DECL_RTL (val)) == REG
1567 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1568 || ! allows_reg
1569 || is_inout)
1571 output_rtx[i] = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1573 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1574 error ("output number %d not directly addressable", i);
1575 if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
1576 || GET_CODE (output_rtx[i]) == CONCAT)
1578 real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1579 output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
1580 if (is_inout)
1581 emit_move_insn (output_rtx[i], real_output_rtx[i]);
1584 else
1586 output_rtx[i] = assign_temp (type, 0, 0, 1);
1587 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1590 generating_concat_p = old_generating_concat_p;
1592 if (is_inout)
1594 inout_mode[ninout] = TYPE_MODE (type);
1595 inout_opnum[ninout++] = i;
1599 /* Make vectors for the expression-rtx, constraint strings,
1600 and named operands. */
1602 argvec = rtvec_alloc (ninputs);
1603 constraintvec = rtvec_alloc (ninputs);
1605 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1606 : GET_MODE (output_rtx[0])),
1607 TREE_STRING_POINTER (string),
1608 empty_string, 0, argvec, constraintvec,
1609 filename, line);
1611 MEM_VOLATILE_P (body) = vol;
1613 /* Eval the inputs and put them into ARGVEC.
1614 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1616 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1618 bool allows_reg, allows_mem;
1619 const char *constraint;
1620 tree val, type;
1621 rtx op;
1623 constraint = constraints[i + noutputs];
1624 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1625 constraints, &allows_mem, &allows_reg))
1626 abort ();
1628 generating_concat_p = 0;
1630 val = TREE_VALUE (tail);
1631 type = TREE_TYPE (val);
1632 op = expand_expr (val, NULL_RTX, VOIDmode, 0);
1634 /* Never pass a CONCAT to an ASM. */
1635 if (GET_CODE (op) == CONCAT)
1636 op = force_reg (GET_MODE (op), op);
1638 if (asm_operand_ok (op, constraint) <= 0)
1640 if (allows_reg)
1641 op = force_reg (TYPE_MODE (type), op);
1642 else if (!allows_mem)
1643 warning ("asm operand %d probably doesn't match constraints",
1644 i + noutputs);
1645 else if (CONSTANT_P (op))
1646 op = force_const_mem (TYPE_MODE (type), op);
1647 else if (GET_CODE (op) == REG
1648 || GET_CODE (op) == SUBREG
1649 || GET_CODE (op) == ADDRESSOF
1650 || GET_CODE (op) == CONCAT)
1652 tree qual_type = build_qualified_type (type,
1653 (TYPE_QUALS (type)
1654 | TYPE_QUAL_CONST));
1655 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1657 emit_move_insn (memloc, op);
1658 op = memloc;
1661 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1663 /* We won't recognize volatile memory as available a
1664 memory_operand at this point. Ignore it. */
1666 else if (queued_subexp_p (op))
1668 else
1669 /* ??? Leave this only until we have experience with what
1670 happens in combine and elsewhere when constraints are
1671 not satisfied. */
1672 warning ("asm operand %d probably doesn't match constraints",
1673 i + noutputs);
1676 generating_concat_p = old_generating_concat_p;
1677 ASM_OPERANDS_INPUT (body, i) = op;
1679 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1680 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1683 /* Protect all the operands from the queue now that they have all been
1684 evaluated. */
1686 generating_concat_p = 0;
1688 for (i = 0; i < ninputs - ninout; i++)
1689 ASM_OPERANDS_INPUT (body, i)
1690 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1692 for (i = 0; i < noutputs; i++)
1693 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1695 /* For in-out operands, copy output rtx to input rtx. */
1696 for (i = 0; i < ninout; i++)
1698 int j = inout_opnum[i];
1699 char buffer[16];
1701 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1702 = output_rtx[j];
1704 sprintf (buffer, "%d", j);
1705 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1706 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_alloc_string (buffer, -1));
1709 generating_concat_p = old_generating_concat_p;
1711 /* Now, for each output, construct an rtx
1712 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1713 ARGVEC CONSTRAINTS OPNAMES))
1714 If there is more than one, put them inside a PARALLEL. */
1716 if (noutputs == 1 && nclobbers == 0)
1718 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1719 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1722 else if (noutputs == 0 && nclobbers == 0)
1724 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1725 insn = emit_insn (body);
1728 else
1730 rtx obody = body;
1731 int num = noutputs;
1733 if (num == 0)
1734 num = 1;
1736 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1738 /* For each output operand, store a SET. */
1739 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1741 XVECEXP (body, 0, i)
1742 = gen_rtx_SET (VOIDmode,
1743 output_rtx[i],
1744 gen_rtx_ASM_OPERANDS
1745 (GET_MODE (output_rtx[i]),
1746 TREE_STRING_POINTER (string),
1747 constraints[i], i, argvec, constraintvec,
1748 filename, line));
1750 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1753 /* If there are no outputs (but there are some clobbers)
1754 store the bare ASM_OPERANDS into the PARALLEL. */
1756 if (i == 0)
1757 XVECEXP (body, 0, i++) = obody;
1759 /* Store (clobber REG) for each clobbered register specified. */
1761 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1763 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1764 int j = decode_reg_name (regname);
1766 if (j < 0)
1768 if (j == -3) /* `cc', which is not a register */
1769 continue;
1771 if (j == -4) /* `memory', don't cache memory across asm */
1773 XVECEXP (body, 0, i++)
1774 = gen_rtx_CLOBBER (VOIDmode,
1775 gen_rtx_MEM
1776 (BLKmode,
1777 gen_rtx_SCRATCH (VOIDmode)));
1778 continue;
1781 /* Ignore unknown register, error already signaled. */
1782 continue;
1785 /* Use QImode since that's guaranteed to clobber just one reg. */
1786 XVECEXP (body, 0, i++)
1787 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1790 insn = emit_insn (body);
1793 /* For any outputs that needed reloading into registers, spill them
1794 back to where they belong. */
1795 for (i = 0; i < noutputs; ++i)
1796 if (real_output_rtx[i])
1797 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1799 free_temp_slots ();
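/* [Editorial example; the x86 mnemonic and register constraints are
   assumptions, not taken from this file.]  A statement exercising the
   paths above, with two register outputs, no inputs, and the special
   `memory' clobber that becomes (clobber (mem:BLK (scratch))):

       unsigned lo, hi;
       asm volatile ("rdtsc" : "=a" (lo), "=d" (hi) : : "memory");  */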
1802 /* A subroutine of expand_asm_operands. Check that all operands have
1803 the same number of alternatives. Return true if so. */
1805 static bool
1806 check_operand_nalternatives (outputs, inputs)
1807 tree outputs, inputs;
1809 if (outputs || inputs)
1811 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1812 int nalternatives
1813 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1814 tree next = inputs;
1816 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1818 error ("too many alternatives in `asm'");
1819 return false;
1822 tmp = outputs;
1823 while (tmp)
1825 const char *constraint
1826 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1828 if (n_occurrences (',', constraint) != nalternatives)
1830 error ("operand constraints for `asm' differ in number of alternatives");
1831 return false;
1834 if (TREE_CHAIN (tmp))
1835 tmp = TREE_CHAIN (tmp);
1836 else
1837 tmp = next, next = 0;
1841 return true;
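/* [Editorial example, not in the original source.]  Commas inside a
   constraint string separate alternatives, so this is accepted:

       asm ("mov %1, %0" : "=r,m" (x) : "m,r" (y));

   whereas pairing "=r,m" with a plain "r" input would trigger the
   "differ in number of alternatives" error above.  */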
1844 /* A subroutine of expand_asm_operands. Check that all operand names
1845 are unique. Return true if so. We rely on the fact that these names
1846 are identifiers, and so have been canonicalized by get_identifier,
1847 so all we need are pointer comparisons. */
1849 static bool
1850 check_unique_operand_names (outputs, inputs)
1851 tree outputs, inputs;
1853 tree i, j;
1855 for (i = outputs; i ; i = TREE_CHAIN (i))
1857 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1858 if (! i_name)
1859 continue;
1861 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1862 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1863 goto failure;
1866 for (i = inputs; i ; i = TREE_CHAIN (i))
1868 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1869 if (! i_name)
1870 continue;
1872 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1873 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1874 goto failure;
1875 for (j = outputs; j ; j = TREE_CHAIN (j))
1876 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1877 goto failure;
1880 return true;
1882 failure:
1883 error ("duplicate asm operand name '%s'",
1884 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1885 return false;
1888 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1889 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1890 STRING and in the constraints to those numbers. */
1892 static tree
1893 resolve_operand_names (string, outputs, inputs, pconstraints)
1894 tree string;
1895 tree outputs, inputs;
1896 const char **pconstraints;
1898 char *buffer = xstrdup (TREE_STRING_POINTER (string));
1899 char *p;
1900 tree t;
1902 /* Assume that we will not need extra space to perform the substitution.
1903 This is because we get to remove '[' and ']', which means we cannot have
1904 a problem until we have more than 999 operands. */
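/* [Editorial worked example.]  The shortest possible named reference,
   "%[x]", is 4 characters; any replacement up to "%999" is also at most
   4 characters.  Only a 4-digit operand number, i.e. roughly a thousand
   or more operands, could make the substituted string outgrow the
   original.  */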
1906 p = buffer;
1907 while ((p = strchr (p, '%')) != NULL)
1909 if (p[1] == '[')
1910 p += 1;
1911 else if (ISALPHA (p[1]) && p[2] == '[')
1912 p += 2;
1913 else
1915 p += 1;
1916 continue;
1919 p = resolve_operand_name_1 (p, outputs, inputs);
1922 string = build_string (strlen (buffer), buffer);
1923 free (buffer);
1925 /* Collect output constraints here because it's convenient.
1926 There should be no named operands here; this is verified
1927 in expand_asm_operands. */
1928 for (t = outputs; t ; t = TREE_CHAIN (t), pconstraints++)
1929 *pconstraints = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1931 /* Substitute [<name>] in input constraint strings. */
1932 for (t = inputs; t ; t = TREE_CHAIN (t), pconstraints++)
1934 const char *c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1935 if (strchr (c, '[') == NULL)
1936 *pconstraints = c;
1937 else
1939 p = buffer = xstrdup (c);
1940 while ((p = strchr (p, '[')) != NULL)
1941 p = resolve_operand_name_1 (p, outputs, inputs);
1943 *pconstraints = ggc_alloc_string (buffer, -1);
1944 free (buffer);
1948 return string;
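/* [Editorial example, not in the original source.]  Given

       asm ("bsf %[in], %[out]" : [out] "=r" (r) : [in] "rm" (x));

   the template is rewritten to "bsf %1, %0", outputs being numbered
   before inputs.  A matching input constraint written as "[out]"
   would likewise become "0" via the constraint loop above.  */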
1951 /* A subroutine of resolve_operand_names. P points to the '[' for a
1952 potential named operand of the form [<name>]. In place, replace
1953 the name and brackets with a number. Return a pointer to the
1954 balance of the string after substitution. */
1956 static char *
1957 resolve_operand_name_1 (p, outputs, inputs)
1958 char *p;
1959 tree outputs, inputs;
1961 char *q;
1962 int op;
1963 tree t;
1964 size_t len;
1966 /* Collect the operand name. */
1967 q = strchr (p, ']');
1968 if (!q)
1970 error ("missing close brace for named operand");
1971 return strchr (p, '\0');
1973 len = q - p - 1;
1975 /* Resolve the name to a number. */
1976 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1978 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1979 if (name)
1981 const char *c = TREE_STRING_POINTER (name);
1982 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1983 goto found;
1986 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1988 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1989 if (name)
1991 const char *c = TREE_STRING_POINTER (name);
1992 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1993 goto found;
1997 *q = '\0';
1998 error ("undefined named operand '%s'", p + 1);
1999 op = 0;
2000 found:
2002 /* Replace the name with the number. Unfortunately, not all libraries
2003 get the return value of sprintf correct, so search for the end of the
2004 generated string by hand. */
2005 sprintf (p, "%d", op);
2006 p = strchr (p, '\0');
2008 /* Verify the no extra buffer space assumption. */
2009 if (p > q)
2010 abort ();
2012 /* Shift the rest of the buffer down to fill the gap. */
2013 memmove (p, q + 1, strlen (q + 1) + 1);
2015 return p;
2018 /* Generate RTL to evaluate the expression EXP
2019 and remember it in case this is the VALUE in a ({... VALUE; }) construct.
2020 Provided just for backward-compatibility. expand_expr_stmt_value()
2021 should be used for new code. */
2023 void
2024 expand_expr_stmt (exp)
2025 tree exp;
2027 expand_expr_stmt_value (exp, -1, 1);
2030 /* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2031 whether to (1) save the value of the expression, (0) discard it or
2032 (-1) use expr_stmts_for_value to tell. The use of -1 is
2033 deprecated, and retained only for backward compatibility. */
2035 void
2036 expand_expr_stmt_value (exp, want_value, maybe_last)
2037 tree exp;
2038 int want_value, maybe_last;
2040 rtx value;
2041 tree type;
2043 if (want_value == -1)
2044 want_value = expr_stmts_for_value != 0;
2046 /* If -W, warn about statements with no side effects,
2047 except for an explicit cast to void (e.g. for assert()), and
2048 except for last statement in ({...}) where they may be useful. */
2049 if (! want_value
2050 && (expr_stmts_for_value == 0 || ! maybe_last)
2051 && exp != error_mark_node)
2053 if (! TREE_SIDE_EFFECTS (exp))
2055 if ((extra_warnings || warn_unused_value)
2056 && !(TREE_CODE (exp) == CONVERT_EXPR
2057 && VOID_TYPE_P (TREE_TYPE (exp))))
2058 warning_with_file_and_line (emit_filename, emit_lineno,
2059 "statement with no effect");
2061 else if (warn_unused_value)
2062 warn_if_unused_value (exp);
2065 /* If EXP is of function type and we are expanding statements for
2066 value, convert it to pointer-to-function. */
2067 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2068 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2070 /* The call to `expand_expr' could cause last_expr_type and
2071 last_expr_value to get reset. Therefore, we set last_expr_value
2072 and last_expr_type *after* calling expand_expr. */
2073 value = expand_expr (exp, want_value ? NULL_RTX : const0_rtx,
2074 VOIDmode, 0);
2075 type = TREE_TYPE (exp);
2077 /* If all we do is reference a volatile value in memory,
2078 copy it to a register to be sure it is actually touched. */
2079 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2081 if (TYPE_MODE (type) == VOIDmode)
2083 else if (TYPE_MODE (type) != BLKmode)
2084 value = copy_to_reg (value);
2085 else
2087 rtx lab = gen_label_rtx ();
2089 /* Compare the value with itself to reference it. */
2090 emit_cmp_and_jump_insns (value, value, EQ,
2091 expand_expr (TYPE_SIZE (type),
2092 NULL_RTX, VOIDmode, 0),
2093 BLKmode, 0, lab);
2094 emit_label (lab);
2098 /* If this expression is part of a ({...}) and is in memory, we may have
2099 to preserve temporaries. */
2100 preserve_temp_slots (value);
2102 /* Free any temporaries used to evaluate this expression. Any temporary
2103 used as a result of this expression will already have been preserved
2104 above. */
2105 free_temp_slots ();
2107 if (want_value)
2109 last_expr_value = value;
2110 last_expr_type = type;
2113 emit_queue ();
2116 /* Warn if EXP contains any computations whose results are not used.
2117 Return 1 if a warning is printed; 0 otherwise. */
2119 int
2120 warn_if_unused_value (exp)
2121 tree exp;
2123 if (TREE_USED (exp))
2124 return 0;
2126 /* Don't warn about void constructs. This includes casting to void,
2127 void function calls, and statement expressions with a final cast
2128 to void. */
2129 if (VOID_TYPE_P (TREE_TYPE (exp)))
2130 return 0;
2132 switch (TREE_CODE (exp))
2134 case PREINCREMENT_EXPR:
2135 case POSTINCREMENT_EXPR:
2136 case PREDECREMENT_EXPR:
2137 case POSTDECREMENT_EXPR:
2138 case MODIFY_EXPR:
2139 case INIT_EXPR:
2140 case TARGET_EXPR:
2141 case CALL_EXPR:
2142 case METHOD_CALL_EXPR:
2143 case RTL_EXPR:
2144 case TRY_CATCH_EXPR:
2145 case WITH_CLEANUP_EXPR:
2146 case EXIT_EXPR:
2147 return 0;
2149 case BIND_EXPR:
2150 /* For a binding, warn if no side effect within it. */
2151 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2153 case SAVE_EXPR:
2154 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2156 case TRUTH_ORIF_EXPR:
2157 case TRUTH_ANDIF_EXPR:
2158 /* In && or ||, warn if 2nd operand has no side effect. */
2159 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2161 case COMPOUND_EXPR:
2162 if (TREE_NO_UNUSED_WARNING (exp))
2163 return 0;
2164 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2165 return 1;
2166 /* Let people do `(foo (), 0)' without a warning. */
2167 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2168 return 0;
2169 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2171 case NOP_EXPR:
2172 case CONVERT_EXPR:
2173 case NON_LVALUE_EXPR:
2174 /* Don't warn about conversions not explicit in the user's program. */
2175 if (TREE_NO_UNUSED_WARNING (exp))
2176 return 0;
2177 /* Assignment to a cast usually results in a cast of a modify.
2178 Don't complain about that. There can be an arbitrary number of
2179 casts before the modify, so we must loop until we find the first
2180 non-cast expression and then test to see if that is a modify. */
2182 tree tem = TREE_OPERAND (exp, 0);
2184 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2185 tem = TREE_OPERAND (tem, 0);
2187 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2188 || TREE_CODE (tem) == CALL_EXPR)
2189 return 0;
2191 goto maybe_warn;
2193 case INDIRECT_REF:
2194 /* Don't warn about automatic dereferencing of references, since
2195 the user cannot control it. */
2196 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2197 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2198 /* Fall through. */
2200 default:
2201 /* Referencing a volatile value is a side effect, so don't warn. */
2202 if ((DECL_P (exp)
2203 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2204 && TREE_THIS_VOLATILE (exp))
2205 return 0;
2207 /* If this is an expression which has no operands, there is no value
2208 to be unused. There are no such language-independent codes,
2209 but front ends may define such. */
2210 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2211 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2212 return 0;
2214 maybe_warn:
2215 /* If this is an expression with side effects, don't warn. */
2216 if (TREE_SIDE_EFFECTS (exp))
2217 return 0;
2219 warning_with_file_and_line (emit_filename, emit_lineno,
2220 "value computed is not used");
2221 return 1;
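/* [Editorial examples of the cases handled above.]

       (x++, y);       warns: "value computed is not used" for Y
       (void) f ();    quiet: void type
       vflag;          quiet when VFLAG is volatile: the read is a
                       side effect
       (g (), 0);      quiet: constant second operand of a comma  */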
2225 /* Clear out the memory of the last expression evaluated. */
2227 void
2228 clear_last_expr ()
2230 last_expr_type = NULL_TREE;
2231 last_expr_value = NULL_RTX;
2234 /* Begin a statement-expression, i.e., a series of statements which
2235 may return a value. Return the RTL_EXPR for this statement expr.
2236 The caller must save that value and pass it to
2237 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2238 in the statement-expression are deallocated at the end of the
2239 expression. */
2241 tree
2242 expand_start_stmt_expr (has_scope)
2243 int has_scope;
2245 tree t;
2247 /* Make the RTL_EXPR node temporary, not momentary,
2248 so that rtl_expr_chain doesn't become garbage. */
2249 t = make_node (RTL_EXPR);
2250 do_pending_stack_adjust ();
2251 if (has_scope)
2252 start_sequence_for_rtl_expr (t);
2253 else
2254 start_sequence ();
2255 NO_DEFER_POP;
2256 expr_stmts_for_value++;
2257 return t;
2260 /* Restore the previous state at the end of a statement that returns a value.
2261 Returns a tree node representing the statement's value and the
2262 insns to compute the value.
2264 The nodes of that expression have been freed by now, so we cannot use them.
2265 But we don't want to do that anyway; the expression has already been
2266 evaluated and now we just want to use the value. So generate a RTL_EXPR
2267 with the proper type and RTL value.
2269 If the last substatement was not an expression,
2270 return something with type `void'. */
2272 tree
2273 expand_end_stmt_expr (t)
2274 tree t;
2276 OK_DEFER_POP;
2278 if (! last_expr_value || ! last_expr_type)
2280 last_expr_value = const0_rtx;
2281 last_expr_type = void_type_node;
2283 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2284 /* Remove any possible QUEUED. */
2285 last_expr_value = protect_from_queue (last_expr_value, 0);
2287 emit_queue ();
2289 TREE_TYPE (t) = last_expr_type;
2290 RTL_EXPR_RTL (t) = last_expr_value;
2291 RTL_EXPR_SEQUENCE (t) = get_insns ();
2293 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2295 end_sequence ();
2297 /* Don't consider deleting this expr or containing exprs at tree level. */
2298 TREE_SIDE_EFFECTS (t) = 1;
2299 /* Propagate volatility of the actual RTL expr. */
2300 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2302 clear_last_expr ();
2303 expr_stmts_for_value--;
2305 return t;
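/* [Editorial sketch of how a front end drives the pair above; the
   variable names are invented.]  For a GNU C statement expression
   such as `y = ({ int t = f (); t + 1; });':

       tree rtl_expr = expand_start_stmt_expr (1);
       ... expand the inner statements; the final expression records
           last_expr_value and last_expr_type ...
       tree value = expand_end_stmt_expr (rtl_expr);

   VALUE then participates in the enclosing expression like any
   other tree.  */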
2308 /* Generate RTL for the start of an if-then. COND is the expression
2309 whose truth should be tested.
2311 If EXITFLAG is nonzero, this conditional is visible to
2312 `exit_something'. */
2314 void
2315 expand_start_cond (cond, exitflag)
2316 tree cond;
2317 int exitflag;
2319 struct nesting *thiscond = ALLOC_NESTING ();
2321 /* Make an entry on cond_stack for the cond we are entering. */
2323 thiscond->desc = COND_NESTING;
2324 thiscond->next = cond_stack;
2325 thiscond->all = nesting_stack;
2326 thiscond->depth = ++nesting_depth;
2327 thiscond->data.cond.next_label = gen_label_rtx ();
2328 /* Before we encounter an `else', we don't need a separate exit label
2329 unless there are supposed to be exit statements
2330 to exit this conditional. */
2331 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2332 thiscond->data.cond.endif_label = thiscond->exit_label;
2333 cond_stack = thiscond;
2334 nesting_stack = thiscond;
2336 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2339 /* Generate RTL between then-clause and the elseif-clause
2340 of an if-then-elseif-.... */
2342 void
2343 expand_start_elseif (cond)
2344 tree cond;
2346 if (cond_stack->data.cond.endif_label == 0)
2347 cond_stack->data.cond.endif_label = gen_label_rtx ();
2348 emit_jump (cond_stack->data.cond.endif_label);
2349 emit_label (cond_stack->data.cond.next_label);
2350 cond_stack->data.cond.next_label = gen_label_rtx ();
2351 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2354 /* Generate RTL between the then-clause and the else-clause
2355 of an if-then-else. */
2357 void
2358 expand_start_else ()
2360 if (cond_stack->data.cond.endif_label == 0)
2361 cond_stack->data.cond.endif_label = gen_label_rtx ();
2363 emit_jump (cond_stack->data.cond.endif_label);
2364 emit_label (cond_stack->data.cond.next_label);
2365 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2368 /* After calling expand_start_else, turn this "else" into an "else if"
2369 by providing another condition. */
2371 void
2372 expand_elseif (cond)
2373 tree cond;
2375 cond_stack->data.cond.next_label = gen_label_rtx ();
2376 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2379 /* Generate RTL for the end of an if-then.
2380 Pop the record for it off of cond_stack. */
2382 void
2383 expand_end_cond ()
2385 struct nesting *thiscond = cond_stack;
2387 do_pending_stack_adjust ();
2388 if (thiscond->data.cond.next_label)
2389 emit_label (thiscond->data.cond.next_label);
2390 if (thiscond->data.cond.endif_label)
2391 emit_label (thiscond->data.cond.endif_label);
2393 POPSTACK (cond_stack);
2394 clear_last_expr ();
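/* [Editorial sketch, not in the original source.]  The call sequence
   a parser makes for `if (a) s1; else if (b) s2; else s3;':

       expand_start_cond (a, 0);
       ... expand s1 ...
       expand_start_elseif (b);
       ... expand s2 ...
       expand_start_else ();
       ... expand s3 ...
       expand_end_cond ();  */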
2397 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2398 loop should be exited by `exit_something'. This is a loop for which
2399 `expand_continue' will jump to the top of the loop.
2401 Make an entry on loop_stack to record the labels associated with
2402 this loop. */
2404 struct nesting *
2405 expand_start_loop (exit_flag)
2406 int exit_flag;
2408 struct nesting *thisloop = ALLOC_NESTING ();
2410 /* Make an entry on loop_stack for the loop we are entering. */
2412 thisloop->desc = LOOP_NESTING;
2413 thisloop->next = loop_stack;
2414 thisloop->all = nesting_stack;
2415 thisloop->depth = ++nesting_depth;
2416 thisloop->data.loop.start_label = gen_label_rtx ();
2417 thisloop->data.loop.end_label = gen_label_rtx ();
2418 thisloop->data.loop.alt_end_label = 0;
2419 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2420 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2421 loop_stack = thisloop;
2422 nesting_stack = thisloop;
2424 do_pending_stack_adjust ();
2425 emit_queue ();
2426 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2427 emit_label (thisloop->data.loop.start_label);
2429 return thisloop;
2432 /* Like expand_start_loop but for a loop where the continuation point
2433 (for expand_continue_loop) will be specified explicitly. */
2435 struct nesting *
2436 expand_start_loop_continue_elsewhere (exit_flag)
2437 int exit_flag;
2439 struct nesting *thisloop = expand_start_loop (exit_flag);
2440 loop_stack->data.loop.continue_label = gen_label_rtx ();
2441 return thisloop;
2444 /* Begin a null, aka do { } while (0) "loop". But since the contents
2445 of said loop can still contain a break, we must frob the loop nest. */
2447 struct nesting *
2448 expand_start_null_loop ()
2450 struct nesting *thisloop = ALLOC_NESTING ();
2452 /* Make an entry on loop_stack for the loop we are entering. */
2454 thisloop->desc = LOOP_NESTING;
2455 thisloop->next = loop_stack;
2456 thisloop->all = nesting_stack;
2457 thisloop->depth = ++nesting_depth;
2458 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2459 thisloop->data.loop.end_label = gen_label_rtx ();
2460 thisloop->data.loop.alt_end_label = NULL_RTX;
2461 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2462 thisloop->exit_label = thisloop->data.loop.end_label;
2463 loop_stack = thisloop;
2464 nesting_stack = thisloop;
2466 return thisloop;
2469 /* Specify the continuation point for a loop started with
2470 expand_start_loop_continue_elsewhere.
2471 Use this at the point in the code to which a continue statement
2472 should jump. */
2474 void
2475 expand_loop_continue_here ()
2477 do_pending_stack_adjust ();
2478 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2479 emit_label (loop_stack->data.loop.continue_label);
2482 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2483 Pop the block off of loop_stack. */
2485 void
2486 expand_end_loop ()
2488 rtx start_label = loop_stack->data.loop.start_label;
2489 rtx etc_note;
2490 int eh_regions, debug_blocks;
2492 /* Mark the continue-point at the top of the loop if none elsewhere. */
2493 if (start_label == loop_stack->data.loop.continue_label)
2494 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2496 do_pending_stack_adjust ();
2498 /* If the loop starts with a loop exit, roll that to the end where
2499 it will optimize together with the jump back.
2501 If the loop presently looks like this (in pseudo-C):
2503 LOOP_BEG
2504 start_label:
2505 if (test) goto end_label;
2506 LOOP_END_TOP_COND
2507 body;
2508 goto start_label;
2509 end_label:
2511 transform it to look like:
2513 LOOP_BEG
2514 goto start_label;
2515 top_label:
2516 body;
2517 start_label:
2518 if (test) goto end_label;
2519 goto top_label;
2520 end_label:
2522 We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
2523 the end of the entry conditional. Without this, our lexical scan
2524 can't tell the difference between an entry conditional and a
2525 body conditional that exits the loop. Mistaking the two means
2526 that we can misplace the NOTE_INSN_LOOP_CONT note, which can
2527 screw up loop unrolling.
2529 Things will be oh so much better when loop optimization is done
2530 off of a proper control flow graph... */
2532 /* Scan insns from the top of the loop looking for the END_TOP_COND note. */
2534 eh_regions = debug_blocks = 0;
2535 for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2536 if (GET_CODE (etc_note) == NOTE)
2538 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2539 break;
2541 /* We must not walk into a nested loop. */
2542 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2544 etc_note = NULL_RTX;
2545 break;
2548 /* At the same time, scan for EH region notes, as we don't want
2549 to scrog region nesting. This shouldn't happen, but... */
2550 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2551 eh_regions++;
2552 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2554 if (--eh_regions < 0)
2555 /* We've come to the end of an EH region, but never saw the
2556 beginning of that region. That means that an EH region
2557 begins before the top of the loop, and ends in the middle
2558 of it. The existence of such a situation violates a basic
2559 assumption in this code, since that would imply that even
2560 when EH_REGIONS is zero, we might move code out of an
2561 exception region. */
2562 abort ();
2565 /* Likewise for debug scopes. In this case we'll either (1) move
2566 all of the notes if they are properly nested or (2) leave the
2567 notes alone and only rotate the loop at high optimization
2568 levels when we expect to scrog debug info. */
2569 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2570 debug_blocks++;
2571 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2572 debug_blocks--;
2575 if (etc_note
2576 && optimize
2577 && eh_regions == 0
2578 && (debug_blocks == 0 || optimize >= 2)
2579 && NEXT_INSN (etc_note) != NULL_RTX
2580 && ! any_condjump_p (get_last_insn ()))
2582 /* We found one. Move everything from START to ETC to the end
2583 of the loop, and add a jump from the top of the loop. */
2584 rtx top_label = gen_label_rtx ();
2585 rtx start_move = start_label;
2587 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2588 then we want to move this note also. */
2589 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2590 && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
2591 start_move = PREV_INSN (start_move);
2593 emit_label_before (top_label, start_move);
2595 /* Actually move the insns. If the debug scopes are nested, we
2596 can move everything at once. Otherwise we have to move them
2597 one by one and squeeze out the block notes. */
2598 if (debug_blocks == 0)
2599 reorder_insns (start_move, etc_note, get_last_insn ());
2600 else
2602 rtx insn, next_insn;
2603 for (insn = start_move; insn; insn = next_insn)
2605 /* Figure out which insn comes after this one. We have
2606 to do this before we move INSN. */
2607 next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
2609 if (GET_CODE (insn) == NOTE
2610 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2611 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2612 continue;
2614 reorder_insns (insn, insn, get_last_insn ());
2618 /* Add the jump from the top of the loop. */
2619 emit_jump_insn_before (gen_jump (start_label), top_label);
2620 emit_barrier_before (top_label);
2621 start_label = top_label;
2624 emit_jump (start_label);
2625 emit_note (NULL, NOTE_INSN_LOOP_END);
2626 emit_label (loop_stack->data.loop.end_label);
2628 POPSTACK (loop_stack);
2630 clear_last_expr ();
2633 /* Finish a null loop, aka do { } while (0). */
2635 void
2636 expand_end_null_loop ()
2638 do_pending_stack_adjust ();
2639 emit_label (loop_stack->data.loop.end_label);
2641 POPSTACK (loop_stack);
2643 clear_last_expr ();
2646 /* Generate a jump to the current loop's continue-point.
2647 This is usually the top of the loop, but may be specified
2648 explicitly elsewhere. If not currently inside a loop,
2649 return 0 and do nothing; caller will print an error message. */
2651 int
2652 expand_continue_loop (whichloop)
2653 struct nesting *whichloop;
2655 /* Emit information for branch prediction. */
2656 rtx note;
2658 note = emit_note (NULL, NOTE_INSN_PREDICTION);
2659 NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
2660 clear_last_expr ();
2661 if (whichloop == 0)
2662 whichloop = loop_stack;
2663 if (whichloop == 0)
2664 return 0;
2665 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2666 NULL_RTX);
2667 return 1;
2670 /* Generate a jump to exit the current loop. If not currently inside a loop,
2671 return 0 and do nothing; caller will print an error message. */
2673 int
2674 expand_exit_loop (whichloop)
2675 struct nesting *whichloop;
2677 clear_last_expr ();
2678 if (whichloop == 0)
2679 whichloop = loop_stack;
2680 if (whichloop == 0)
2681 return 0;
2682 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2683 return 1;
2686 /* Generate a conditional jump to exit the current loop if COND
2687 evaluates to zero. If not currently inside a loop,
2688 return 0 and do nothing; caller will print an error message. */
2690 int
2691 expand_exit_loop_if_false (whichloop, cond)
2692 struct nesting *whichloop;
2693 tree cond;
2695 rtx label = gen_label_rtx ();
2696 rtx last_insn;
2697 clear_last_expr ();
2699 if (whichloop == 0)
2700 whichloop = loop_stack;
2701 if (whichloop == 0)
2702 return 0;
2703 /* In order to handle fixups, we actually create a conditional jump
2704 around an unconditional branch to exit the loop. If fixups are
2705 necessary, they go before the unconditional branch. */
2707 do_jump (cond, NULL_RTX, label);
2708 last_insn = get_last_insn ();
2709 if (GET_CODE (last_insn) == CODE_LABEL)
2710 whichloop->data.loop.alt_end_label = last_insn;
2711 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2712 NULL_RTX);
2713 emit_label (label);
2715 return 1;
2718 /* Like expand_exit_loop_if_false except also emit a note marking
2719 the end of the conditional. Should only be used immediately
2720 after expand_start_loop. */
2722 int
2723 expand_exit_loop_top_cond (whichloop, cond)
2724 struct nesting *whichloop;
2725 tree cond;
2727 if (! expand_exit_loop_if_false (whichloop, cond))
2728 return 0;
2730 emit_note (NULL, NOTE_INSN_LOOP_END_TOP_COND);
2731 return 1;
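/* [Editorial sketch; this mirrors how a C-like front end might expand
   `while (cond) body;' using the functions above, assuming the loop
   is visible to exit_something.]

       expand_start_loop (1);
       expand_exit_loop_top_cond (0, cond);
       ... expand body ...
       expand_end_loop ();

   The NOTE_INSN_LOOP_END_TOP_COND emitted here is what later lets
   expand_end_loop rotate the entry test to the bottom of the loop.  */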
2734 /* Return nonzero if the loop nest is empty. Else return zero. */
2736 int
2737 stmt_loop_nest_empty ()
2739 /* cfun->stmt can be NULL if we are building a call to get the
2740 EH context for a setjmp/longjmp EH target and the current
2741 function was a deferred inline function. */
2742 return (cfun->stmt == NULL || loop_stack == NULL);
2745 /* Return non-zero if we should preserve sub-expressions as separate
2746 pseudos. We never do so if we aren't optimizing. We always do so
2747 if -fexpensive-optimizations.
2749 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2750 the loop may still be a small one. */
2752 int
2753 preserve_subexpressions_p ()
2755 rtx insn;
2757 if (flag_expensive_optimizations)
2758 return 1;
2760 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2761 return 0;
2763 insn = get_last_insn_anywhere ();
2765 return (insn
2766 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2767 < n_non_fixed_regs * 3));
2771 /* Generate a jump to exit the current loop, conditional, binding contour
2772 or case statement. Not all such constructs are visible to this function,
2773 only those started with EXIT_FLAG nonzero. Individual languages use
2774 the EXIT_FLAG parameter to control which kinds of constructs you can
2775 exit this way.
2777 If not currently inside anything that can be exited,
2778 return 0 and do nothing; caller will print an error message. */
2780 int
2781 expand_exit_something ()
2783 struct nesting *n;
2784 clear_last_expr ();
2785 for (n = nesting_stack; n; n = n->all)
2786 if (n->exit_label != 0)
2788 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2789 return 1;
2792 return 0;
2795 /* Generate RTL to return from the current function, with no value.
2796 (That is, we do not do anything about returning any value.) */
2798 void
2799 expand_null_return ()
2801 rtx last_insn;
2803 last_insn = get_last_insn ();
2805 /* If this function was declared to return a value, but we
2806 didn't, clobber the return registers so that they are not
2807 propagated live to the rest of the function. */
2808 clobber_return_register ();
2810 expand_null_return_1 (last_insn);
2813 /* Try to guess whether the returned value indicates an error code. */
2814 static enum br_predictor
2815 return_prediction (val)
2816 rtx val;
2818 /* Different heuristics for pointers and scalars. */
2819 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2821 /* NULL is usually not returned. */
2822 if (val == const0_rtx)
2823 return PRED_NULL_RETURN;
2825 else
2827 /* Negative return values are often used to indicate
2828 errors. */
2829 if (GET_CODE (val) == CONST_INT
2830 && INTVAL (val) < 0)
2831 return PRED_NEGATIVE_RETURN;
2832 /* Constant return values are also usually errors;
2833 zero/one often mean booleans, so exclude them from the
2834 heuristics. */
2835 if (CONSTANT_P (val)
2836 && (val != const0_rtx && val != const1_rtx))
2837 return PRED_CONST_RETURN;
2839 return PRED_NO_PREDICTION;
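/* [Editorial examples of the heuristics above.]  `return NULL;' from
   a pointer-valued function yields PRED_NULL_RETURN; `return -1;'
   yields PRED_NEGATIVE_RETURN; `return 42;' yields PRED_CONST_RETURN;
   `return 0;' and `return 1;' from an integer function yield no
   prediction, since they commonly encode booleans.  */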
2842 /* Generate RTL to return from the current function, with value VAL. */
2844 static void
2845 expand_value_return (val)
2846 rtx val;
2848 rtx last_insn;
2849 rtx return_reg;
2850 enum br_predictor pred;
2852 if ((pred = return_prediction (val)) != PRED_NO_PREDICTION)
2854 /* Emit information for branch prediction. */
2855 rtx note;
2857 note = emit_note (NULL, NOTE_INSN_PREDICTION);
2859 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2863 last_insn = get_last_insn ();
2864 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2866 /* Copy the value to the return location
2867 unless it's already there. */
2869 if (return_reg != val)
2871 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2872 #ifdef PROMOTE_FUNCTION_RETURN
2873 int unsignedp = TREE_UNSIGNED (type);
2874 enum machine_mode old_mode
2875 = DECL_MODE (DECL_RESULT (current_function_decl));
2876 enum machine_mode mode
2877 = promote_mode (type, old_mode, &unsignedp, 1);
2879 if (mode != old_mode)
2880 val = convert_modes (mode, old_mode, val, unsignedp);
2881 #endif
2882 if (GET_CODE (return_reg) == PARALLEL)
2883 emit_group_load (return_reg, val, int_size_in_bytes (type));
2884 else
2885 emit_move_insn (return_reg, val);
2888 expand_null_return_1 (last_insn);
2891 /* Output a return with no value. If LAST_INSN is nonzero,
2892 pretend that the return takes place after LAST_INSN. */
2894 static void
2895 expand_null_return_1 (last_insn)
2896 rtx last_insn;
2898 rtx end_label = cleanup_label ? cleanup_label : return_label;
2900 clear_pending_stack_adjust ();
2901 do_pending_stack_adjust ();
2902 clear_last_expr ();
2904 if (end_label == 0)
2905 end_label = return_label = gen_label_rtx ();
2906 expand_goto_internal (NULL_TREE, end_label, last_insn);
2909 /* Generate RTL to evaluate the expression RETVAL and return it
2910 from the current function. */
2912 void
2913 expand_return (retval)
2914 tree retval;
2916 /* If there are any cleanups to be performed, then they will
2917 be inserted following LAST_INSN. It is desirable
2918 that the last_insn, for such purposes, should be the
2919 last insn before computing the return value. Otherwise, cleanups
2920 which call functions can clobber the return value. */
2921 /* ??? rms: I think that is erroneous, because in C++ it would
2922 run destructors on variables that might be used in the subsequent
2923 computation of the return value. */
2924 rtx last_insn = 0;
2925 rtx result_rtl;
2926 rtx val = 0;
2927 tree retval_rhs;
2929 /* If function wants no value, give it none. */
2930 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2932 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2933 emit_queue ();
2934 expand_null_return ();
2935 return;
2938 if (retval == error_mark_node)
2940 /* Treat this like a return of no value from a function that
2941 returns a value. */
2942 expand_null_return ();
2943 return;
2945 else if (TREE_CODE (retval) == RESULT_DECL)
2946 retval_rhs = retval;
2947 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2948 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2949 retval_rhs = TREE_OPERAND (retval, 1);
2950 else if (VOID_TYPE_P (TREE_TYPE (retval)))
2951 /* Recognize tail-recursive call to void function. */
2952 retval_rhs = retval;
2953 else
2954 retval_rhs = NULL_TREE;
2956 last_insn = get_last_insn ();
2958 /* Distribute return down conditional expr if either of the sides
2959 may involve tail recursion (see test below). This enhances the number
2960 of tail recursions we see. Don't do this always since it can produce
2961 sub-optimal code in some cases and we distribute assignments into
2962 conditional expressions when it would help. */
2964 if (optimize && retval_rhs != 0
2965 && frame_offset == 0
2966 && TREE_CODE (retval_rhs) == COND_EXPR
2967 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2968 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2970 rtx label = gen_label_rtx ();
2971 tree expr;
2973 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2974 start_cleanup_deferral ();
2975 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2976 DECL_RESULT (current_function_decl),
2977 TREE_OPERAND (retval_rhs, 1));
2978 TREE_SIDE_EFFECTS (expr) = 1;
2979 expand_return (expr);
2980 emit_label (label);
2982 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2983 DECL_RESULT (current_function_decl),
2984 TREE_OPERAND (retval_rhs, 2));
2985 TREE_SIDE_EFFECTS (expr) = 1;
2986 expand_return (expr);
2987 end_cleanup_deferral ();
2988 return;
2991 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
2993 /* If the result is an aggregate that is being returned in one (or more)
2994 registers, load the registers here. The compiler currently can't handle
2995 copying a BLKmode value into registers. We could put this code in a
2996 more general area (for use by everyone instead of just function
2997 call/return), but until this feature is generally usable it is kept here
2998 (and in expand_call). The value must go into a pseudo in case there
2999 are cleanups that will clobber the real return register. */
3001 if (retval_rhs != 0
3002 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3003 && GET_CODE (result_rtl) == REG)
3005 int i;
3006 unsigned HOST_WIDE_INT bitpos, xbitpos;
3007 unsigned HOST_WIDE_INT big_endian_correction = 0;
3008 unsigned HOST_WIDE_INT bytes
3009 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3010 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3011 unsigned int bitsize
3012 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3013 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3014 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3015 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3016 enum machine_mode tmpmode, result_reg_mode;
3018 if (bytes == 0)
3020 expand_null_return ();
3021 return;
3024 /* Structures whose size is not a multiple of a word are aligned
3025 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3026 machine, this means we must skip the empty high order bytes when
3027 calculating the bit offset. */
3028 if (BYTES_BIG_ENDIAN
3029 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
3030 && bytes % UNITS_PER_WORD)
3031 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3032 * BITS_PER_UNIT));
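/* [Editorial worked example.]  For a 6-byte structure on a 32-bit
   BYTES_BIG_ENDIAN target: bytes % UNITS_PER_WORD == 2, so
   big_endian_correction = 32 - 2*8 = 16, and the loop below starts
   storing at bit 16 of the first destination word, right-justifying
   the value.  */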
3034 /* Copy the structure BITSIZE bits at a time. */
3035 for (bitpos = 0, xbitpos = big_endian_correction;
3036 bitpos < bytes * BITS_PER_UNIT;
3037 bitpos += bitsize, xbitpos += bitsize)
3039 /* We need a new destination pseudo each time xbitpos is
3040 on a word boundary and when xbitpos == big_endian_correction
3041 (the first time through). */
3042 if (xbitpos % BITS_PER_WORD == 0
3043 || xbitpos == big_endian_correction)
3045 /* Generate an appropriate register. */
3046 dst = gen_reg_rtx (word_mode);
3047 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3049 /* Clear the destination before we move anything into it. */
3050 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
3053 /* We need a new source operand each time bitpos is on a word
3054 boundary. */
3055 if (bitpos % BITS_PER_WORD == 0)
3056 src = operand_subword_force (result_val,
3057 bitpos / BITS_PER_WORD,
3058 BLKmode);
3060 /* Use bitpos for the source extraction (left justified) and
3061 xbitpos for the destination store (right justified). */
3062 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3063 extract_bit_field (src, bitsize,
3064 bitpos % BITS_PER_WORD, 1,
3065 NULL_RTX, word_mode, word_mode,
3066 BITS_PER_WORD),
3067 BITS_PER_WORD);
3070 /* Find the smallest integer mode large enough to hold the
3071 entire structure and use that mode instead of BLKmode
3072 on the USE insn for the return register. */
3073 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3074 tmpmode != VOIDmode;
3075 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3076 /* Have we found a large enough mode? */
3077 if (GET_MODE_SIZE (tmpmode) >= bytes)
3078 break;
3080 /* No suitable mode found. */
3081 if (tmpmode == VOIDmode)
3082 abort ();
3084 PUT_MODE (result_rtl, tmpmode);
3086 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3087 result_reg_mode = word_mode;
3088 else
3089 result_reg_mode = tmpmode;
3090 result_reg = gen_reg_rtx (result_reg_mode);
3092 emit_queue ();
3093 for (i = 0; i < n_regs; i++)
3094 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3095 result_pseudos[i]);
3097 if (tmpmode != result_reg_mode)
3098 result_reg = gen_lowpart (tmpmode, result_reg);
3100 expand_value_return (result_reg);
3102 else if (retval_rhs != 0
3103 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3104 && (GET_CODE (result_rtl) == REG
3105 || (GET_CODE (result_rtl) == PARALLEL)))
3107 /* Calculate the return value into a temporary (usually a pseudo
3108 reg). */
3109 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3110 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3112 val = assign_temp (nt, 0, 0, 1);
3113 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3114 val = force_not_mem (val);
3115 emit_queue ();
3116 /* Return the calculated value, doing cleanups first. */
3117 expand_value_return (val);
3119 else
3121 /* No cleanups or no hard reg used;
3122 calculate value into hard return reg. */
3123 expand_expr (retval, const0_rtx, VOIDmode, 0);
3124 emit_queue ();
3125 expand_value_return (result_rtl);
3129 /* Return 1 if the end of the generated RTX is not a barrier.
3130 This means code already compiled can drop through. */
3132 int
3133 drop_through_at_end_p ()
3135 rtx insn = get_last_insn ();
3136 while (insn && GET_CODE (insn) == NOTE)
3137 insn = PREV_INSN (insn);
3138 return insn && GET_CODE (insn) != BARRIER;
3141 /* Attempt to optimize a potential tail recursion call into a goto.
3142 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3143 where to place the jump to the tail recursion label.
3145 Return TRUE if the call was optimized into a goto. */
3147 int
3148 optimize_tail_recursion (arguments, last_insn)
3149 tree arguments;
3150 rtx last_insn;
3152 /* Finish checking validity, and if valid emit code to set the
3153 argument variables for the new call. */
3154 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3156 if (tail_recursion_label == 0)
3158 tail_recursion_label = gen_label_rtx ();
3159 emit_label_after (tail_recursion_label,
3160 tail_recursion_reentry);
3162 emit_queue ();
3163 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3164 emit_barrier ();
3165 return 1;
3167 return 0;
3170 /* Emit code to alter this function's formal parms for a tail-recursive call.
3171 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3172 FORMALS is the chain of decls of formals.
3173 Return 1 if this can be done;
3174 otherwise return 0 and do not emit any code. */
3176 static int
3177 tail_recursion_args (actuals, formals)
3178 tree actuals, formals;
3180 tree a = actuals, f = formals;
3181 int i;
3182 rtx *argvec;
3184 /* Check that number and types of actuals are compatible
3185 with the formals. This is not always true in valid C code.
3186 Also check that no formal needs to be addressable
3187 and that all formals are scalars. */
3189 /* Also count the args. */
3191 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3193 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3194 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3195 return 0;
3196 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3197 return 0;
3199 if (a != 0 || f != 0)
3200 return 0;
3202 /* Compute all the actuals. */
3204 argvec = (rtx *) alloca (i * sizeof (rtx));
3206 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3207 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3209 /* Find which actual values refer to current values of previous formals.
3210 Copy each of them now, before any formal is changed. */
3212 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3214 int copy = 0;
3215 int j;
3216 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3217 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3219 copy = 1;
3220 break;
3222 if (copy)
3223 argvec[i] = copy_to_reg (argvec[i]);
3226 /* Store the values of the actuals into the formals. */
3228 for (f = formals, a = actuals, i = 0; f;
3229 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3231 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3232 emit_move_insn (DECL_RTL (f), argvec[i]);
3233 else
3234 convert_move (DECL_RTL (f), argvec[i],
3235 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3238 free_temp_slots ();
3239 return 1;
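/* [Editorial sketch of the transformation performed above; SUM is an
   invented example.]  For

       int sum (int n, int acc) { ... return sum (n - 1, acc + n); }

   the recursive call becomes, in effect,

       t1 = n - 1;  t2 = acc + n;
       n = t1;  acc = t2;
       goto tail_recursion_label;

   all actuals being computed (and copied to fresh pseudos where they
   mention a formal stored earlier) before any formal is changed.  */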
3242 /* Generate the RTL code for entering a binding contour.
3243 The variables are declared one by one, by calls to `expand_decl'.
3245 FLAGS is a bitwise or of the following flags:
3247 1 - Nonzero if this construct should be visible to
3248 `exit_something'.
3250 2 - Nonzero if this contour does not require a
3251 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3252 language-independent code should set this flag because they
3253 will not create corresponding BLOCK nodes. (There should be
3254 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3255 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3256 when expand_end_bindings is called.
3258 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3259 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3260 note. */
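/* [Editorial examples of the FLAGS encodings above.]

       expand_start_bindings_and_block (0, blk);   note plus BLOCK node
       expand_start_bindings (2);                  no BLOCK_BEG note, as
                                                   in expand_start_target_temps
                                                   below

   (expand_start_bindings is the conventional wrapper that supplies a
   null BLOCK and passes its argument through as FLAGS.)  */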
3262 void
3263 expand_start_bindings_and_block (flags, block)
3264 int flags;
3265 tree block;
3267 struct nesting *thisblock = ALLOC_NESTING ();
3268 rtx note;
3269 int exit_flag = ((flags & 1) != 0);
3270 int block_flag = ((flags & 2) == 0);
3272 /* If a BLOCK is supplied, then the caller should be requesting a
3273 NOTE_INSN_BLOCK_BEG note. */
3274 if (!block_flag && block)
3275 abort ();
3277 /* Create a note to mark the beginning of the block. */
3278 if (block_flag)
3280 note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
3281 NOTE_BLOCK (note) = block;
3283 else
3284 note = emit_note (NULL, NOTE_INSN_DELETED);
3286 /* Make an entry on block_stack for the block we are entering. */
3288 thisblock->desc = BLOCK_NESTING;
3289 thisblock->next = block_stack;
3290 thisblock->all = nesting_stack;
3291 thisblock->depth = ++nesting_depth;
3292 thisblock->data.block.stack_level = 0;
3293 thisblock->data.block.cleanups = 0;
3294 thisblock->data.block.n_function_calls = 0;
3295 thisblock->data.block.exception_region = 0;
3296 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3298 thisblock->data.block.conditional_code = 0;
3299 thisblock->data.block.last_unconditional_cleanup = note;
3300 /* When we insert instructions after the last unconditional cleanup,
3301 we don't adjust last_insn. That means that a later add_insn will
3302 clobber the instructions we've just added. The easiest way to
3303 fix this is to just insert another instruction here, so that the
3304 instructions inserted after the last unconditional cleanup are
3305 never the last instruction. */
3306 emit_note (NULL, NOTE_INSN_DELETED);
3308 if (block_stack
3309 && !(block_stack->data.block.cleanups == NULL_TREE
3310 && block_stack->data.block.outer_cleanups == NULL_TREE))
3311 thisblock->data.block.outer_cleanups
3312 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3313 block_stack->data.block.outer_cleanups);
3314 else
3315 thisblock->data.block.outer_cleanups = 0;
3316 thisblock->data.block.label_chain = 0;
3317 thisblock->data.block.innermost_stack_block = stack_block_stack;
3318 thisblock->data.block.first_insn = note;
3319 thisblock->data.block.block_start_count = ++current_block_start_count;
3320 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3321 block_stack = thisblock;
3322 nesting_stack = thisblock;
3324 /* Make a new level for allocating stack slots. */
3325 push_temp_slots ();
3328 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3329 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3330 expand_expr are made. After we end the region, we know that all
3331 space for all temporaries that were created by TARGET_EXPRs will be
3332 destroyed and their space freed for reuse. */
3334 void
3335 expand_start_target_temps ()
3337 /* This is so that even if the result is preserved, the space
3338 allocated will be freed, as we know that it is no longer in use. */
3339 push_temp_slots ();
3341 /* Start a new binding layer that will keep track of all cleanup
3342 actions to be performed. */
3343 expand_start_bindings (2);
3345 target_temp_slot_level = temp_slot_level;
3348 void
3349 expand_end_target_temps ()
3351 expand_end_bindings (NULL_TREE, 0, 0);
3353 /* This is so that even if the result is preserved, the space
3354 allocated will be freed, as we know that it is no longer in use. */
3355 pop_temp_slots ();
3358 /* Given a pointer to a BLOCK node return non-zero if (and only if) the node
3359 in question represents the outermost pair of curly braces (i.e. the "body
3360 block") of a function or method.
3362 For any BLOCK node representing a "body block" of a function or method, the
3363 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3364 represents the outermost (function) scope for the function or method (i.e.
3365 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3366 *that* node in turn will point to the relevant FUNCTION_DECL node. */
3368 int
3369 is_body_block (stmt)
3370 tree stmt;
3372 if (TREE_CODE (stmt) == BLOCK)
3374 tree parent = BLOCK_SUPERCONTEXT (stmt);
3376 if (parent && TREE_CODE (parent) == BLOCK)
3378 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3380 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3381 return 1;
3385 return 0;
3388 /* True if we are currently emitting insns in an area of output code
3389 that is controlled by a conditional expression. This is used by
3390 the cleanup handling code to generate conditional cleanup actions. */
3392 int
3393 conditional_context ()
3395 return block_stack && block_stack->data.block.conditional_code;
3398 /* Return an opaque pointer to the current nesting level, so frontend code
3399 can check its own sanity. */
3401 struct nesting *
3402 current_nesting_level ()
3404 return cfun ? block_stack : 0;
3407 /* Emit a handler label for a nonlocal goto handler.
3408 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3410 static rtx
3411 expand_nl_handler_label (slot, before_insn)
3412 rtx slot, before_insn;
3414 rtx insns;
3415 rtx handler_label = gen_label_rtx ();
3417 /* Don't let cleanup_cfg delete the handler. */
3418 LABEL_PRESERVE_P (handler_label) = 1;
3420 start_sequence ();
3421 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3422 insns = get_insns ();
3423 end_sequence ();
3424 emit_insn_before (insns, before_insn);
3426 emit_label (handler_label);
3428 return handler_label;
3431 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3432 handler. */
3433 static void
3434 expand_nl_goto_receiver ()
3436 #ifdef HAVE_nonlocal_goto
3437 if (! HAVE_nonlocal_goto)
3438 #endif
3439 /* First adjust our frame pointer to its actual value. It was
3440 previously set to the start of the virtual area corresponding to
3441 the stacked variables when we branched here and now needs to be
3442 adjusted to the actual hardware fp value.
3444 Assignments to virtual registers are converted by
3445 instantiate_virtual_regs into the corresponding assignment
3446 to the underlying register (fp in this case) that makes
3447 the original assignment true.
3448 So the following insn will actually be
3449 decrementing fp by STARTING_FRAME_OFFSET. */
3450 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3452 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3453 if (fixed_regs[ARG_POINTER_REGNUM])
3455 #ifdef ELIMINABLE_REGS
3456 /* If the argument pointer can be eliminated in favor of the
3457 frame pointer, we don't need to restore it. We assume here
3458 that if such an elimination is present, it can always be used.
3459 This is the case on all known machines; if we don't make this
3460 assumption, we do unnecessary saving on many machines. */
3461 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3462 size_t i;
3464 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3465 if (elim_regs[i].from == ARG_POINTER_REGNUM
3466 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3467 break;
3469 if (i == ARRAY_SIZE (elim_regs))
3470 #endif
3472 /* Now restore our arg pointer from the address at which it
3473 was saved in our stack frame. */
3474 emit_move_insn (virtual_incoming_args_rtx,
3475 copy_to_reg (get_arg_pointer_save_area (cfun)));
3478 #endif
3480 #ifdef HAVE_nonlocal_goto_receiver
3481 if (HAVE_nonlocal_goto_receiver)
3482 emit_insn (gen_nonlocal_goto_receiver ());
3483 #endif
3486 /* Make handlers for nonlocal gotos taking place in the function calls in
3487 block THISBLOCK. */
3489 static void
3490 expand_nl_goto_receivers (thisblock)
3491 struct nesting *thisblock;
3493 tree link;
3494 rtx afterward = gen_label_rtx ();
3495 rtx insns, slot;
3496 rtx label_list;
3497 int any_invalid;
3499 /* Record the handler address in the stack slot for that purpose,
3500 during this block, saving and restoring the outer value. */
3501 if (thisblock->next != 0)
3502 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3504 rtx save_receiver = gen_reg_rtx (Pmode);
3505 emit_move_insn (XEXP (slot, 0), save_receiver);
3507 start_sequence ();
3508 emit_move_insn (save_receiver, XEXP (slot, 0));
3509 insns = get_insns ();
3510 end_sequence ();
3511 emit_insn_before (insns, thisblock->data.block.first_insn);
3514 /* Jump around the handlers; they run only when specially invoked. */
3515 emit_jump (afterward);
3517 /* Make a separate handler for each label. */
3518 link = nonlocal_labels;
3519 slot = nonlocal_goto_handler_slots;
3520 label_list = NULL_RTX;
3521 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3522 /* Skip any labels we shouldn't be able to jump to from here,
3523 we generate one special handler for all of them below which just calls
3524 abort. */
3525 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3527 rtx lab;
3528 lab = expand_nl_handler_label (XEXP (slot, 0),
3529 thisblock->data.block.first_insn);
3530 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3532 expand_nl_goto_receiver ();
3534 /* Jump to the "real" nonlocal label. */
3535 expand_goto (TREE_VALUE (link));
3538 /* A second pass over all nonlocal labels; this time we handle those
3539 we should not be able to jump to at this point. */
3540 link = nonlocal_labels;
3541 slot = nonlocal_goto_handler_slots;
3542 any_invalid = 0;
3543 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3544 if (DECL_TOO_LATE (TREE_VALUE (link)))
3546 rtx lab;
3547 lab = expand_nl_handler_label (XEXP (slot, 0),
3548 thisblock->data.block.first_insn);
3549 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3550 any_invalid = 1;
3553 if (any_invalid)
3555 expand_nl_goto_receiver ();
3556 expand_builtin_trap ();
3559 nonlocal_goto_handler_labels = label_list;
3560 emit_label (afterward);
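/* [Editorial example of the construct served by the receivers above;
   GNU C nested-function extension.]

       int outer ()
       {
         __label__ done;
         void inner () { goto done; }    -- nonlocal goto
         ... calls that may reach inner ...
        done:
         return 1;
       }

   Each call site that might reach `inner' needs the handler code
   emitted here, so the frame and argument pointers are valid again
   when control lands at DONE.  */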
3563 /* Warn about any unused VARS (which may contain nodes other than
3564 VAR_DECLs, but such nodes are ignored). The nodes are connected
3565 via the TREE_CHAIN field. */
3567 void
3568 warn_about_unused_variables (vars)
3569 tree vars;
3571 tree decl;
3573 if (warn_unused_variable)
3574 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3575 if (TREE_CODE (decl) == VAR_DECL
3576 && ! TREE_USED (decl)
3577 && ! DECL_IN_SYSTEM_HEADER (decl)
3578 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3579 warning_with_decl (decl, "unused variable `%s'");
3582 /* Generate RTL code to terminate a binding contour.
3584 VARS is the chain of VAR_DECL nodes for the variables bound in this
3585 contour. There may actually be other nodes in this chain, but any
3586 nodes other than VAR_DECLS are ignored.
3588 MARK_ENDS is nonzero if we should put a note at the beginning
3589 and end of this binding contour.
3591 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3592 (That is true automatically if the contour has a saved stack level.) */
3594 void
3595 expand_end_bindings (vars, mark_ends, dont_jump_in)
3596 tree vars;
3597 int mark_ends;
3598 int dont_jump_in;
3600 struct nesting *thisblock = block_stack;
3602 /* If any of the variables in this scope were not used, warn the
3603 user. */
3604 warn_about_unused_variables (vars);
3606 if (thisblock->exit_label)
3608 do_pending_stack_adjust ();
3609 emit_label (thisblock->exit_label);
3612 /* If necessary, make handlers for nonlocal gotos taking
3613 place in the function calls in this block. */
3614 if (function_call_count != thisblock->data.block.n_function_calls
3615 && nonlocal_labels
3616 /* Make handler for outermost block
3617 if there were any nonlocal gotos to this function. */
3618 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3619 /* Make handler for inner block if it has something
3620 special to do when you jump out of it. */
3621 : (thisblock->data.block.cleanups != 0
3622 || thisblock->data.block.stack_level != 0)))
3623 expand_nl_goto_receivers (thisblock);
3625 /* Don't allow jumping into a block that has a stack level.
3626 Cleanups are allowed, though. */
3627 if (dont_jump_in
3628 || thisblock->data.block.stack_level != 0)
3630 struct label_chain *chain;
3632 /* Any labels in this block are no longer valid to go to.
3633 Mark them to cause an error message. */
3634 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3636 DECL_TOO_LATE (chain->label) = 1;
3637 /* If any goto without a fixup came to this label,
3638 that must be an error, because gotos without fixups
3639 come from outside all saved stack-levels. */
3640 if (TREE_ADDRESSABLE (chain->label))
3641 error_with_decl (chain->label,
3642 "label `%s' used before containing binding contour");
3646 /* Restore stack level in effect before the block
3647 (only if variable-size objects allocated). */
3648 /* Perform any cleanups associated with the block. */
3650 if (thisblock->data.block.stack_level != 0
3651 || thisblock->data.block.cleanups != 0)
3653 int reachable;
3654 rtx insn;
3656 /* Don't let cleanups affect ({...}) constructs. */
3657 int old_expr_stmts_for_value = expr_stmts_for_value;
3658 rtx old_last_expr_value = last_expr_value;
3659 tree old_last_expr_type = last_expr_type;
3660 expr_stmts_for_value = 0;
3662 /* Only clean up here if this point can actually be reached. */
3663 insn = get_last_insn ();
3664 if (GET_CODE (insn) == NOTE)
3665 insn = prev_nonnote_insn (insn);
3666 reachable = (! insn || GET_CODE (insn) != BARRIER);
3668 /* Do the cleanups. */
3669 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3670 if (reachable)
3671 do_pending_stack_adjust ();
3673 expr_stmts_for_value = old_expr_stmts_for_value;
3674 last_expr_value = old_last_expr_value;
3675 last_expr_type = old_last_expr_type;
3677 /* Restore the stack level. */
3679 if (reachable && thisblock->data.block.stack_level != 0)
3681 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3682 thisblock->data.block.stack_level, NULL_RTX);
3683 if (nonlocal_goto_handler_slots != 0)
3684 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3685 NULL_RTX);
3688 /* Any gotos out of this block must also do these things.
3689 Also report any gotos with fixups that came to labels in this
3690 level. */
3691 fixup_gotos (thisblock,
3692 thisblock->data.block.stack_level,
3693 thisblock->data.block.cleanups,
3694 thisblock->data.block.first_insn,
3695 dont_jump_in);
3698 /* Mark the beginning and end of the scope if requested.
3699 We do this now, after running cleanups on the variables
3700 just going out of scope, so they are in scope for their cleanups. */
3702 if (mark_ends)
3704 rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
3705 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3707 else
3708 /* Get rid of the beginning-mark if we don't make an end-mark. */
3709 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3711 /* Restore the temporary level of TARGET_EXPRs. */
3712 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3714 /* Restore block_stack level for containing block. */
3716 stack_block_stack = thisblock->data.block.innermost_stack_block;
3717 POPSTACK (block_stack);
3719 /* Pop the stack slot nesting and free any slots at this level. */
3720 pop_temp_slots ();
3723 /* Generate code to save the stack pointer at the start of the current block
3724 and set up to restore it on exit. */
3726 void
3727 save_stack_pointer ()
3729 struct nesting *thisblock = block_stack;
3731 if (thisblock->data.block.stack_level == 0)
3733 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3734 &thisblock->data.block.stack_level,
3735 thisblock->data.block.first_insn);
3736 stack_block_stack = thisblock;
3740 /* Generate RTL for the automatic variable declaration DECL.
3741 (Other kinds of declarations are simply ignored if seen here.) */
3743 void
3744 expand_decl (decl)
3745 tree decl;
3747 struct nesting *thisblock;
3748 tree type;
3750 type = TREE_TYPE (decl);
3752 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3753 type in case this node is used in a reference. */
3754 if (TREE_CODE (decl) == CONST_DECL)
3756 DECL_MODE (decl) = TYPE_MODE (type);
3757 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3758 DECL_SIZE (decl) = TYPE_SIZE (type);
3759 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3760 return;
3763 /* Otherwise, only automatic variables need any expansion done. Static and
3764 external variables, and external functions, will be handled by
3765 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3766 nothing. PARM_DECLs are handled in `assign_parms'. */
3767 if (TREE_CODE (decl) != VAR_DECL)
3768 return;
3770 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3771 return;
3773 thisblock = block_stack;
3775 /* Create the RTL representation for the variable. */
3777 if (type == error_mark_node)
3778 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3780 else if (DECL_SIZE (decl) == 0)
3781 /* Variable with incomplete type. */
3783 rtx x;
3784 if (DECL_INITIAL (decl) == 0)
3785 /* Error message was already done; now avoid a crash. */
3786 x = gen_rtx_MEM (BLKmode, const0_rtx);
3787 else
3788 /* An initializer is going to decide the size of this array.
3789 Until we know the size, represent its address with a reg. */
3790 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3792 set_mem_attributes (x, decl, 1);
3793 SET_DECL_RTL (decl, x);
3795 else if (DECL_MODE (decl) != BLKmode
3796 /* If -ffloat-store, don't put explicit float vars
3797 into regs. */
3798 && !(flag_float_store
3799 && TREE_CODE (type) == REAL_TYPE)
3800 && ! TREE_THIS_VOLATILE (decl)
3801 && (DECL_REGISTER (decl) || optimize))
3803 /* Automatic variable that can go in a register. */
3804 int unsignedp = TREE_UNSIGNED (type);
3805 enum machine_mode reg_mode
3806 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3808 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3810 if (GET_CODE (DECL_RTL (decl)) == REG)
3811 REGNO_DECL (REGNO (DECL_RTL (decl))) = decl;
3812 else if (GET_CODE (DECL_RTL (decl)) == CONCAT)
3814 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 0))) = decl;
3815 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 1))) = decl;
3818 mark_user_reg (DECL_RTL (decl));
3820 if (POINTER_TYPE_P (type))
3821 mark_reg_pointer (DECL_RTL (decl),
3822 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3824 maybe_set_unchanging (DECL_RTL (decl), decl);
3826 /* If something wants our address, try to use ADDRESSOF. */
3827 if (TREE_ADDRESSABLE (decl))
3828 put_var_into_stack (decl);
3831 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3832 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3833 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3834 STACK_CHECK_MAX_VAR_SIZE)))
3836 /* Variable of fixed size that goes on the stack. */
3837 rtx oldaddr = 0;
3838 rtx addr;
3839 rtx x;
3841 /* If we previously made RTL for this decl, it must be an array
3842 whose size was determined by the initializer.
3843 The old address was a register; set that register now
3844 to the proper address. */
3845 if (DECL_RTL_SET_P (decl))
3847 if (GET_CODE (DECL_RTL (decl)) != MEM
3848 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3849 abort ();
3850 oldaddr = XEXP (DECL_RTL (decl), 0);
3853 /* Set alignment we actually gave this decl. */
3854 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3855 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3856 DECL_USER_ALIGN (decl) = 0;
3858 x = assign_temp (decl, 1, 1, 1);
3859 set_mem_attributes (x, decl, 1);
3860 SET_DECL_RTL (decl, x);
3862 if (oldaddr)
3864 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3865 if (addr != oldaddr)
3866 emit_move_insn (oldaddr, addr);
3869 else
3870 /* Dynamic-size object: must push space on the stack. */
3872 rtx address, size, x;
3874 /* Record the stack pointer on entry to the block, if we have
3875 not already done so. */
3876 do_pending_stack_adjust ();
3877 save_stack_pointer ();
3879 /* In function-at-a-time mode, variable_size doesn't expand this,
3880 so do it now. */
3881 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3882 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3883 const0_rtx, VOIDmode, 0);
3885 /* Compute the variable's size, in bytes. */
3886 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3887 free_temp_slots ();
3889 /* Allocate space on the stack for the variable. Note that
3890 DECL_ALIGN says how the variable is to be aligned and we
3891 cannot use it to conclude anything about the alignment of
3892 the size. */
3893 address = allocate_dynamic_stack_space (size, NULL_RTX,
3894 TYPE_ALIGN (TREE_TYPE (decl)));
3896 /* Reference the variable indirectly through that rtx. */
3897 x = gen_rtx_MEM (DECL_MODE (decl), address);
3898 set_mem_attributes (x, decl, 1);
3899 SET_DECL_RTL (decl, x);
3902 /* Indicate the alignment we actually gave this variable. */
3903 #ifdef STACK_BOUNDARY
3904 DECL_ALIGN (decl) = STACK_BOUNDARY;
3905 #else
3906 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3907 #endif
3908 DECL_USER_ALIGN (decl) = 0;
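/* Illustration, not from the original source: local declarations
   that would exercise each branch above, assuming a C front end.
   Compiled out:  */
#if 0
static int
expand_decl_example (int n)
{
  register int counter = 0;      /* scalar, not volatile: pseudo register */
  volatile float f = 0.0f;       /* volatile: kept in a fixed stack slot */
  struct { char buf[64]; } s;    /* BLKmode, fixed size: assign_temp slot */
  char vla[n];                   /* dynamic size: allocate_dynamic_stack_space,
                                    which first forces save_stack_pointer () */
  vla[0] = s.buf[0] = (char) f;
  return counter + vla[0];
}
#endif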
3912 /* Emit code to perform the initialization of a declaration DECL. */
3914 void
3915 expand_decl_init (decl)
3916 tree decl;
3918 int was_used = TREE_USED (decl);
3920 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
3921 for static decls. */
3922 if (TREE_CODE (decl) == CONST_DECL
3923 || TREE_STATIC (decl))
3924 return;
3926 /* Compute and store the initial value now. */
3928 if (DECL_INITIAL (decl) == error_mark_node)
3930 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3932 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3933 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3934 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3935 0, 0);
3936 emit_queue ();
3938 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3940 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3941 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3942 emit_queue ();
3945 /* Don't let the initialization count as "using" the variable. */
3946 TREE_USED (decl) = was_used;
3948 /* Free any temporaries we made while initializing the decl. */
3949 preserve_temp_slots (NULL_RTX);
3950 free_temp_slots ();
3953 /* CLEANUP is an expression to be executed at exit from this binding contour;
3954 for example, in C++, it might call the destructor for this variable.
3956 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3957 CLEANUP multiple times and have the correct semantics. This
3958 happens in exception handling, and for gotos, returns, and breaks
3959 that leave the current scope.
3961 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3962 that is not associated with any particular variable. */
3965 expand_decl_cleanup (decl, cleanup)
3966 tree decl, cleanup;
3968 struct nesting *thisblock;
3970 /* Error if we are not in any block. */
3971 if (cfun == 0 || block_stack == 0)
3972 return 0;
3974 thisblock = block_stack;
3976 /* Record the cleanup if there is one. */
3978 if (cleanup != 0)
3980 tree t;
3981 rtx seq;
3982 tree *cleanups = &thisblock->data.block.cleanups;
3983 int cond_context = conditional_context ();
3985 if (cond_context)
3987 rtx flag = gen_reg_rtx (word_mode);
3988 rtx set_flag_0;
3989 tree cond;
3991 start_sequence ();
3992 emit_move_insn (flag, const0_rtx);
3993 set_flag_0 = get_insns ();
3994 end_sequence ();
3996 thisblock->data.block.last_unconditional_cleanup
3997 = emit_insn_after (set_flag_0,
3998 thisblock->data.block.last_unconditional_cleanup);
4000 emit_move_insn (flag, const1_rtx);
4002 cond = build_decl (VAR_DECL, NULL_TREE,
4003 (*lang_hooks.types.type_for_mode) (word_mode, 1));
4004 SET_DECL_RTL (cond, flag);
4006 /* Conditionalize the cleanup. */
4007 cleanup = build (COND_EXPR, void_type_node,
4008 (*lang_hooks.truthvalue_conversion) (cond),
4009 cleanup, integer_zero_node);
4010 cleanup = fold (cleanup);
4012 cleanups = &thisblock->data.block.cleanups;
4015 cleanup = unsave_expr (cleanup);
4017 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4019 if (! cond_context)
4020 /* If this block has a cleanup, it belongs in stack_block_stack. */
4021 stack_block_stack = thisblock;
4023 if (cond_context)
4025 start_sequence ();
4028 if (! using_eh_for_cleanups_p)
4029 TREE_ADDRESSABLE (t) = 1;
4030 else
4031 expand_eh_region_start ();
4033 if (cond_context)
4035 seq = get_insns ();
4036 end_sequence ();
4037 if (seq)
4038 thisblock->data.block.last_unconditional_cleanup
4039 = emit_insn_after (seq,
4040 thisblock->data.block.last_unconditional_cleanup);
4042 else
4044 thisblock->data.block.last_unconditional_cleanup
4045 = get_last_insn ();
4046 /* When we insert instructions after the last unconditional cleanup,
4047 we don't adjust last_insn. That means that a later add_insn will
4048 clobber the instructions we've just added. The easiest way to
4049 fix this is to just insert another instruction here, so that the
4050 instructions inserted after the last unconditional cleanup are
4051 never the last instruction. */
4052 emit_note (NULL, NOTE_INSN_DELETED);
4055 return 1;
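/* Illustration, not from the original source: what the conditional
   path above amounts to, restated as plain C.  The flag is zeroed at
   the last unconditional point, set where the cleanup is registered,
   and tested before the cleanup runs.  Names here are invented.
   Compiled out:  */
#if 0
extern void build_it (char *);
extern void cleanup_it (char *);

static void
conditional_cleanup_example (int cond)
{
  char obj[16];
  int flag = 0;                  /* set_flag_0, emitted unconditionally */

  if (cond)
    {
      build_it (obj);
      flag = 1;                  /* emitted at the registration point */
    }
  if (flag)                      /* the COND_EXPR wrapped around CLEANUP */
    cleanup_it (obj);
}
#endif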
4058 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
4059 is thrown. */
4062 expand_decl_cleanup_eh (decl, cleanup, eh_only)
4063 tree decl, cleanup;
4064 int eh_only;
4066 int ret = expand_decl_cleanup (decl, cleanup);
4067 if (cleanup && ret)
4069 tree node = block_stack->data.block.cleanups;
4070 CLEANUP_EH_ONLY (node) = eh_only;
4072 return ret;
4075 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4076 DECL_ELTS is the list of elements that belong to DECL's type.
4077 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4079 void
4080 expand_anon_union_decl (decl, cleanup, decl_elts)
4081 tree decl, cleanup, decl_elts;
4083 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4084 rtx x;
4085 tree t;
4087 /* If any of the elements are addressable, so is the entire union. */
4088 for (t = decl_elts; t; t = TREE_CHAIN (t))
4089 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4091 TREE_ADDRESSABLE (decl) = 1;
4092 break;
4095 expand_decl (decl);
4096 expand_decl_cleanup (decl, cleanup);
4097 x = DECL_RTL (decl);
4099 /* Go through the elements, assigning RTL to each. */
4100 for (t = decl_elts; t; t = TREE_CHAIN (t))
4102 tree decl_elt = TREE_VALUE (t);
4103 tree cleanup_elt = TREE_PURPOSE (t);
4104 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4106 /* If any of the elements are used, so is the entire
4107 union. */
4108 if (TREE_USED (decl_elt))
4109 TREE_USED (decl) = 1;
4111 /* Propagate the union's alignment to the elements. */
4112 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4113 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4115 /* If the element has BLKmode and the union doesn't, the union is
4116 aligned such that the element doesn't need to have BLKmode, so
4117 change the element's mode to the appropriate one for its size. */
4118 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4119 DECL_MODE (decl_elt) = mode
4120 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4122 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4123 instead create a new MEM rtx with the proper mode. */
4124 if (GET_CODE (x) == MEM)
4126 if (mode == GET_MODE (x))
4127 SET_DECL_RTL (decl_elt, x);
4128 else
4129 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4131 else if (GET_CODE (x) == REG)
4133 if (mode == GET_MODE (x))
4134 SET_DECL_RTL (decl_elt, x);
4135 else
4136 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4138 else
4139 abort ();
4141 /* Record the cleanup if there is one. */
4143 if (cleanup != 0)
4144 thisblock->data.block.cleanups
4145 = tree_cons (decl_elt, cleanup_elt,
4146 thisblock->data.block.cleanups);
4150 /* Expand a list of cleanups LIST.
4151 Elements may be expressions or may be nested lists.
4153 If DONT_DO is nonnull, then any list-element
4154 whose TREE_PURPOSE matches DONT_DO is omitted.
4155 This is sometimes used to avoid a cleanup associated with
4156 a value that is being returned out of the scope.
4158 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
4159 goto and handle protection regions specially in that case.
4161 If REACHABLE, we emit code, otherwise just inform the exception handling
4162 code about this finalization. */
4164 static void
4165 expand_cleanups (list, dont_do, in_fixup, reachable)
4166 tree list;
4167 tree dont_do;
4168 int in_fixup;
4169 int reachable;
4171 tree tail;
4172 for (tail = list; tail; tail = TREE_CHAIN (tail))
4173 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4175 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4176 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4177 else
4179 if (! in_fixup && using_eh_for_cleanups_p)
4180 expand_eh_region_end_cleanup (TREE_VALUE (tail));
4182 if (reachable && !CLEANUP_EH_ONLY (tail))
4184 /* Cleanups may be run multiple times. For example,
4185 when exiting a binding contour, we expand the
4186 cleanups associated with that contour. When a goto
4187 within that binding contour has a target outside that
4188 contour, it will expand all cleanups from its scope to
4189 the target. Though the cleanups are expanded multiple
4190 times, the control paths are non-overlapping so the
4191 cleanups will not be executed twice. */
4193 /* We may need to protect from outer cleanups. */
4194 if (in_fixup && using_eh_for_cleanups_p)
4196 expand_eh_region_start ();
4198 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4200 expand_eh_region_end_fixup (TREE_VALUE (tail));
4202 else
4203 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4205 free_temp_slots ();
4211 /* Mark the context in which we are emitting RTL as a conditional
4212 context, so that any cleanup actions we register with
4213 expand_decl_cleanup will be properly conditionalized when those
4214 cleanup actions are later performed. Must be called before
4215 expanding any expression (tree) that is within a conditional context. */
4217 void
4218 start_cleanup_deferral ()
4220 /* block_stack can be NULL if we are inside the parameter list. It is
4221 OK to do nothing, because cleanups aren't possible here. */
4222 if (block_stack)
4223 ++block_stack->data.block.conditional_code;
4226 /* Mark the end of a conditional region of code. Because cleanup
4227 deferrals may be nested, we may still be in a conditional region
4228 after we end the currently deferred cleanups; only after we end all
4229 deferred cleanups are we back in unconditional code. */
4231 void
4232 end_cleanup_deferral ()
4234 /* block_stack can be NULL if we are inside the parameter list. It is
4235 OK to do nothing, because cleanups aren't possible here. */
4236 if (block_stack)
4237 --block_stack->data.block.conditional_code;
4240 /* Move all cleanups from the current block_stack
4241 to the containing block_stack, where they are assumed to
4242 have been created. If anything can cause a temporary to
4243 be created, but not expanded for more than one level of
4244 block_stacks, then this code will have to change. */
4246 void
4247 move_cleanups_up ()
4249 struct nesting *block = block_stack;
4250 struct nesting *outer = block->next;
4252 outer->data.block.cleanups
4253 = chainon (block->data.block.cleanups,
4254 outer->data.block.cleanups);
4255 block->data.block.cleanups = 0;
4258 tree
4259 last_cleanup_this_contour ()
4261 if (block_stack == 0)
4262 return 0;
4264 return block_stack->data.block.cleanups;
4267 /* Return 1 if there are any pending cleanups at this point.
4268 If THIS_CONTOUR is nonzero, check the current contour as well.
4269 Otherwise, look only at the contours that enclose this one. */
4272 any_pending_cleanups (this_contour)
4273 int this_contour;
4275 struct nesting *block;
4277 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4278 return 0;
4280 if (this_contour && block_stack->data.block.cleanups != NULL)
4281 return 1;
4282 if (block_stack->data.block.cleanups == 0
4283 && block_stack->data.block.outer_cleanups == 0)
4284 return 0;
4286 for (block = block_stack->next; block; block = block->next)
4287 if (block->data.block.cleanups != 0)
4288 return 1;
4290 return 0;
4293 /* Enter a case (Pascal) or switch (C) statement.
4294 Push a block onto case_stack and nesting_stack
4295 to accumulate the case-labels that are seen
4296 and to record the labels generated for the statement.
4298 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4299 Otherwise, this construct is transparent for `exit_something'.
4301 EXPR is the index-expression to be dispatched on.
4302 TYPE is its nominal type. We could simply convert EXPR to this type,
4303 but instead we take shortcuts. */
4305 void
4306 expand_start_case (exit_flag, expr, type, printname)
4307 int exit_flag;
4308 tree expr;
4309 tree type;
4310 const char *printname;
4312 struct nesting *thiscase = ALLOC_NESTING ();
4314 /* Make an entry on case_stack for the case we are entering. */
4316 thiscase->desc = CASE_NESTING;
4317 thiscase->next = case_stack;
4318 thiscase->all = nesting_stack;
4319 thiscase->depth = ++nesting_depth;
4320 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4321 thiscase->data.case_stmt.case_list = 0;
4322 thiscase->data.case_stmt.index_expr = expr;
4323 thiscase->data.case_stmt.nominal_type = type;
4324 thiscase->data.case_stmt.default_label = 0;
4325 thiscase->data.case_stmt.printname = printname;
4326 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4327 case_stack = thiscase;
4328 nesting_stack = thiscase;
4330 do_pending_stack_adjust ();
4332 /* Make sure case_stmt.start points to something that won't
4333 need any transformation before expand_end_case. */
4334 if (GET_CODE (get_last_insn ()) != NOTE)
4335 emit_note (NULL, NOTE_INSN_DELETED);
4337 thiscase->data.case_stmt.start = get_last_insn ();
4339 start_cleanup_deferral ();
4342 /* Start a "dummy case statement" within which case labels are invalid
4343 and are not connected to any larger real case statement.
4344 This can be used if you don't want to let a case statement jump
4345 into the middle of certain kinds of constructs. */
4347 void
4348 expand_start_case_dummy ()
4350 struct nesting *thiscase = ALLOC_NESTING ();
4352 /* Make an entry on case_stack for the dummy. */
4354 thiscase->desc = CASE_NESTING;
4355 thiscase->next = case_stack;
4356 thiscase->all = nesting_stack;
4357 thiscase->depth = ++nesting_depth;
4358 thiscase->exit_label = 0;
4359 thiscase->data.case_stmt.case_list = 0;
4360 thiscase->data.case_stmt.start = 0;
4361 thiscase->data.case_stmt.nominal_type = 0;
4362 thiscase->data.case_stmt.default_label = 0;
4363 case_stack = thiscase;
4364 nesting_stack = thiscase;
4365 start_cleanup_deferral ();
4368 /* End a dummy case statement. */
4370 void
4371 expand_end_case_dummy ()
4373 end_cleanup_deferral ();
4374 POPSTACK (case_stack);
4377 /* Return the data type of the index-expression
4378 of the innermost case statement, or null if none. */
4380 tree
4381 case_index_expr_type ()
4383 if (case_stack)
4384 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4385 return 0;
4388 static void
4389 check_seenlabel ()
4391 /* If this is the first label, warn if any insns have been emitted. */
4392 if (case_stack->data.case_stmt.line_number_status >= 0)
4394 rtx insn;
4396 restore_line_number_status
4397 (case_stack->data.case_stmt.line_number_status);
4398 case_stack->data.case_stmt.line_number_status = -1;
4400 for (insn = case_stack->data.case_stmt.start;
4401 insn;
4402 insn = NEXT_INSN (insn))
4404 if (GET_CODE (insn) == CODE_LABEL)
4405 break;
4406 if (GET_CODE (insn) != NOTE
4407 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4410 insn = PREV_INSN (insn);
4411 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4413 /* If insn is zero, then there must have been a syntax error. */
4414 if (insn)
4415 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4416 NOTE_LINE_NUMBER (insn),
4417 "unreachable code at beginning of %s",
4418 case_stack->data.case_stmt.printname);
4419 break;
4425 /* Accumulate one case or default label inside a case or switch statement.
4426 VALUE is the value of the case (a null pointer, for a default label).
4427 The function CONVERTER, when applied to arguments T and V,
4428 converts the value V to the type T.
4430 If not currently inside a case or switch statement, return 1 and do
4431 nothing. The caller will print a language-specific error message.
4432 If VALUE is a duplicate or overlaps, return 2 and do nothing
4433 except store the (first) duplicate node in *DUPLICATE.
4434 If VALUE is out of range, return 3 and do nothing.
4435 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4436 Return 0 on success.
4438 Extended to handle range statements. */
4441 pushcase (value, converter, label, duplicate)
4442 tree value;
4443 tree (*converter) PARAMS ((tree, tree));
4444 tree label;
4445 tree *duplicate;
4447 tree index_type;
4448 tree nominal_type;
4450 /* Fail if not inside a real case statement. */
4451 if (! (case_stack && case_stack->data.case_stmt.start))
4452 return 1;
4454 if (stack_block_stack
4455 && stack_block_stack->depth > case_stack->depth)
4456 return 5;
4458 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4459 nominal_type = case_stack->data.case_stmt.nominal_type;
4461 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4462 if (index_type == error_mark_node)
4463 return 0;
4465 /* Convert VALUE to the type in which the comparisons are nominally done. */
4466 if (value != 0)
4467 value = (*converter) (nominal_type, value);
4469 check_seenlabel ();
4471 /* Fail if this value is out of range for the actual type of the index
4472 (which may be narrower than NOMINAL_TYPE). */
4473 if (value != 0
4474 && (TREE_CONSTANT_OVERFLOW (value)
4475 || ! int_fits_type_p (value, index_type)))
4476 return 3;
4478 return add_case_node (value, value, label, duplicate);
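/* Illustration, not from the original source: how a front end might
   act on the return codes documented above.  The diagnostic wording
   is invented; `convert' is the usual tree conversion routine.
   Compiled out:  */
#if 0
static void
pushcase_example (tree value, tree label)
{
  tree duplicate;

  switch (pushcase (value, convert, label, &duplicate))
    {
    case 0:                                   /* success */
      break;
    case 1:
      error ("case label not within a switch statement");
      break;
    case 2:
      error_with_decl (duplicate, "duplicate case value");
      break;
    case 3:
      error ("case value out of range");
      break;
    case 5:
      error ("case label within scope of cleanup or variable array");
      break;
    }
}
#endif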
4481 /* Like pushcase but this case applies to all values between VALUE1 and
4482 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4483 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4484 starts at VALUE1 and ends at the highest value of the index type.
4485 If both are NULL, this case applies to all values.
4487 The return value is the same as that of pushcase but there is one
4488 additional error code: 4 means the specified range was empty. */
4491 pushcase_range (value1, value2, converter, label, duplicate)
4492 tree value1, value2;
4493 tree (*converter) PARAMS ((tree, tree));
4494 tree label;
4495 tree *duplicate;
4497 tree index_type;
4498 tree nominal_type;
4500 /* Fail if not inside a real case statement. */
4501 if (! (case_stack && case_stack->data.case_stmt.start))
4502 return 1;
4504 if (stack_block_stack
4505 && stack_block_stack->depth > case_stack->depth)
4506 return 5;
4508 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4509 nominal_type = case_stack->data.case_stmt.nominal_type;
4511 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4512 if (index_type == error_mark_node)
4513 return 0;
4515 check_seenlabel ();
4517 /* Convert VALUEs to the type in which the comparisons are nominally done
4518 and replace any unspecified value with the corresponding bound. */
4519 if (value1 == 0)
4520 value1 = TYPE_MIN_VALUE (index_type);
4521 if (value2 == 0)
4522 value2 = TYPE_MAX_VALUE (index_type);
4524 /* Fail if the range is empty. Do this before any conversion since
4525 we want to allow out-of-range empty ranges. */
4526 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4527 return 4;
4529 /* If the max was unbounded, use the max of the nominal_type we are
4530 converting to. Do this after the < check above to suppress false
4531 positives. */
4532 if (value2 == 0)
4533 value2 = TYPE_MAX_VALUE (nominal_type);
4535 value1 = (*converter) (nominal_type, value1);
4536 value2 = (*converter) (nominal_type, value2);
4538 /* Fail if these values are out of range. */
4539 if (TREE_CONSTANT_OVERFLOW (value1)
4540 || ! int_fits_type_p (value1, index_type))
4541 return 3;
4543 if (TREE_CONSTANT_OVERFLOW (value2)
4544 || ! int_fits_type_p (value2, index_type))
4545 return 3;
4547 return add_case_node (value1, value2, label, duplicate);
4550 /* Do the actual insertion of a case label for pushcase and pushcase_range
4551 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4552 slowdown for large switch statements. */
4555 add_case_node (low, high, label, duplicate)
4556 tree low, high;
4557 tree label;
4558 tree *duplicate;
4560 struct case_node *p, **q, *r;
4562 /* If there's no HIGH value, then this is not a case range; it's
4563 just a simple case label. But that's just a degenerate case
4564 range. */
4565 if (!high)
4566 high = low;
4568 /* Handle default labels specially. */
4569 if (!high && !low)
4571 if (case_stack->data.case_stmt.default_label != 0)
4573 *duplicate = case_stack->data.case_stmt.default_label;
4574 return 2;
4576 case_stack->data.case_stmt.default_label = label;
4577 expand_label (label);
4578 return 0;
4581 q = &case_stack->data.case_stmt.case_list;
4582 p = *q;
4584 while ((r = *q))
4586 p = r;
4588 /* Keep going past elements distinctly greater than HIGH. */
4589 if (tree_int_cst_lt (high, p->low))
4590 q = &p->left;
4592 /* or distinctly less than LOW. */
4593 else if (tree_int_cst_lt (p->high, low))
4594 q = &p->right;
4596 else
4598 /* We have an overlap; this is an error. */
4599 *duplicate = p->code_label;
4600 return 2;
4604 /* Add this label to the chain, and succeed. */
4606 r = (struct case_node *) ggc_alloc (sizeof (struct case_node));
4607 r->low = low;
4609 /* If the bounds are equal, turn this into the one-value case. */
4610 if (tree_int_cst_equal (low, high))
4611 r->high = r->low;
4612 else
4613 r->high = high;
4615 r->code_label = label;
4616 expand_label (label);
4618 *q = r;
4619 r->parent = p;
4620 r->left = 0;
4621 r->right = 0;
4622 r->balance = 0;
4624 while (p)
4626 struct case_node *s;
4628 if (r == p->left)
4630 int b;
4632 if (! (b = p->balance))
4633 /* Growth propagation from left side. */
4634 p->balance = -1;
4635 else if (b < 0)
4637 if (r->balance < 0)
4639 /* R-Rotation */
4640 if ((p->left = s = r->right))
4641 s->parent = p;
4643 r->right = p;
4644 p->balance = 0;
4645 r->balance = 0;
4646 s = p->parent;
4647 p->parent = r;
4649 if ((r->parent = s))
4651 if (s->left == p)
4652 s->left = r;
4653 else
4654 s->right = r;
4656 else
4657 case_stack->data.case_stmt.case_list = r;
4659 else
4660 /* r->balance == +1 */
4662 /* LR-Rotation */
4664 int b2;
4665 struct case_node *t = r->right;
4667 if ((p->left = s = t->right))
4668 s->parent = p;
4670 t->right = p;
4671 if ((r->right = s = t->left))
4672 s->parent = r;
4674 t->left = r;
4675 b = t->balance;
4676 b2 = b < 0;
4677 p->balance = b2;
4678 b2 = -b2 - b;
4679 r->balance = b2;
4680 t->balance = 0;
4681 s = p->parent;
4682 p->parent = t;
4683 r->parent = t;
4685 if ((t->parent = s))
4687 if (s->left == p)
4688 s->left = t;
4689 else
4690 s->right = t;
4692 else
4693 case_stack->data.case_stmt.case_list = t;
4695 break;
4698 else
4700 /* p->balance == +1; growth of left side balances the node. */
4701 p->balance = 0;
4702 break;
4705 else
4706 /* r == p->right */
4708 int b;
4710 if (! (b = p->balance))
4711 /* Growth propagation from right side. */
4712 p->balance++;
4713 else if (b > 0)
4715 if (r->balance > 0)
4717 /* L-Rotation */
4719 if ((p->right = s = r->left))
4720 s->parent = p;
4722 r->left = p;
4723 p->balance = 0;
4724 r->balance = 0;
4725 s = p->parent;
4726 p->parent = r;
4727 if ((r->parent = s))
4729 if (s->left == p)
4730 s->left = r;
4731 else
4732 s->right = r;
4735 else
4736 case_stack->data.case_stmt.case_list = r;
4739 else
4740 /* r->balance == -1 */
4742 /* RL-Rotation */
4743 int b2;
4744 struct case_node *t = r->left;
4746 if ((p->right = s = t->left))
4747 s->parent = p;
4749 t->left = p;
4751 if ((r->left = s = t->right))
4752 s->parent = r;
4754 t->right = r;
4755 b = t->balance;
4756 b2 = b < 0;
4757 r->balance = b2;
4758 b2 = -b2 - b;
4759 p->balance = b2;
4760 t->balance = 0;
4761 s = p->parent;
4762 p->parent = t;
4763 r->parent = t;
4765 if ((t->parent = s))
4767 if (s->left == p)
4768 s->left = t;
4769 else
4770 s->right = t;
4773 else
4774 case_stack->data.case_stmt.case_list = t;
4776 break;
4778 else
4780 /* p->balance == -1; growth of right side balances the node. */
4781 p->balance = 0;
4782 break;
4786 r = p;
4787 p = p->parent;
4790 return 0;
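/* Illustration, not from the original source: the three-way range
   comparison that drives both the descent and the duplicate check
   above, restated over plain integers.  Compiled out:  */
#if 0
/* Return -1 to descend left, +1 to descend right, 0 on overlap. */
static int
range_compare_example (int low, int high, int node_low, int node_high)
{
  if (high < node_low)           /* entirely below the node's range */
    return -1;
  if (node_high < low)           /* entirely above the node's range */
    return 1;
  return 0;                      /* the ranges intersect: duplicate */
}
#endif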
4793 /* Returns the number of possible values of TYPE.
4794 Returns -1 if the number is unknown, variable, or if the number does not
4795 fit in a HOST_WIDE_INT.
4796 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4797 do not increase monotonically (there may be duplicates);
4798 to 1 if the values increase monotonically, but not always by 1;
4799 otherwise sets it to 0. */
4801 HOST_WIDE_INT
4802 all_cases_count (type, sparseness)
4803 tree type;
4804 int *sparseness;
4806 tree t;
4807 HOST_WIDE_INT count, minval, lastval;
4809 *sparseness = 0;
4811 switch (TREE_CODE (type))
4813 case BOOLEAN_TYPE:
4814 count = 2;
4815 break;
4817 case CHAR_TYPE:
4818 count = 1 << BITS_PER_UNIT;
4819 break;
4821 default:
4822 case INTEGER_TYPE:
4823 if (TYPE_MAX_VALUE (type) != 0
4824 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4825 TYPE_MIN_VALUE (type))))
4826 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4827 convert (type, integer_zero_node))))
4828 && host_integerp (t, 1))
4829 count = tree_low_cst (t, 1);
4830 else
4831 return -1;
4832 break;
4834 case ENUMERAL_TYPE:
4835 /* Don't waste time with enumeral types with huge values. */
4836 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4837 || TYPE_MAX_VALUE (type) == 0
4838 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4839 return -1;
4841 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4842 count = 0;
4844 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4846 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4848 if (*sparseness == 2 || thisval <= lastval)
4849 *sparseness = 2;
4850 else if (thisval != minval + count)
4851 *sparseness = 1;
4853 lastval = thisval;
4854 count++;
4858 return count;
4861 #define BITARRAY_TEST(ARRAY, INDEX) \
4862 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4863 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4864 #define BITARRAY_SET(ARRAY, INDEX) \
4865 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4866 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
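/* Illustration, not from the original source: the macros above used
   on a free-standing bit array, sized the same way as cases_seen in
   check_for_full_enumeration_handling.  Compiled out:  */
#if 0
static int
bitarray_example (void)
{
  /* Room for 100 case values; one bit each, packed into chars. */
  unsigned char seen[(100 + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR];

  memset (seen, 0, sizeof seen);
  BITARRAY_SET (seen, 42);
  return BITARRAY_TEST (seen, 42) != 0      /* bit 42 is now set */
         && BITARRAY_TEST (seen, 43) == 0;  /* bit 43 is untouched */
}
#endif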
4868 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4869 with the case values we have seen, assuming the case expression
4870 has the given TYPE.
4871 SPARSENESS is as determined by all_cases_count.
4873 The time needed is proportional to COUNT, unless
4874 SPARSENESS is 2, in which case quadratic time is needed. */
4876 void
4877 mark_seen_cases (type, cases_seen, count, sparseness)
4878 tree type;
4879 unsigned char *cases_seen;
4880 HOST_WIDE_INT count;
4881 int sparseness;
4883 tree next_node_to_try = NULL_TREE;
4884 HOST_WIDE_INT next_node_offset = 0;
4886 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4887 tree val = make_node (INTEGER_CST);
4889 TREE_TYPE (val) = type;
4890 if (! root)
4891 /* Do nothing. */
4893 else if (sparseness == 2)
4895 tree t;
4896 unsigned HOST_WIDE_INT xlo;
4898 /* This less efficient loop is only needed to handle
4899 duplicate case values (multiple enum constants
4900 with the same value). */
4901 TREE_TYPE (val) = TREE_TYPE (root->low);
4902 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4903 t = TREE_CHAIN (t), xlo++)
4905 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4906 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4907 n = root;
4910 /* Keep going past elements distinctly greater than VAL. */
4911 if (tree_int_cst_lt (val, n->low))
4912 n = n->left;
4914 /* or distinctly less than VAL. */
4915 else if (tree_int_cst_lt (n->high, val))
4916 n = n->right;
4918 else
4920 /* We have found a matching range. */
4921 BITARRAY_SET (cases_seen, xlo);
4922 break;
4925 while (n);
4928 else
4930 if (root->left)
4931 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4933 for (n = root; n; n = n->right)
4935 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4936 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4937 while (! tree_int_cst_lt (n->high, val))
4939 /* Calculate (into xlo) the "offset" of the integer (val).
4940 The element with lowest value has offset 0, the next smallest
4941 element has offset 1, etc. */
4943 unsigned HOST_WIDE_INT xlo;
4944 HOST_WIDE_INT xhi;
4945 tree t;
4947 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4949 /* The TYPE_VALUES will be in increasing order, so
4950 start searching where we last ended. */
4951 t = next_node_to_try;
4952 xlo = next_node_offset;
4953 xhi = 0;
4954 for (;;)
4956 if (t == NULL_TREE)
4958 t = TYPE_VALUES (type);
4959 xlo = 0;
4961 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4963 next_node_to_try = TREE_CHAIN (t);
4964 next_node_offset = xlo + 1;
4965 break;
4967 xlo++;
4968 t = TREE_CHAIN (t);
4969 if (t == next_node_to_try)
4971 xlo = -1;
4972 break;
4976 else
4978 t = TYPE_MIN_VALUE (type);
4979 if (t)
4980 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4981 &xlo, &xhi);
4982 else
4983 xlo = xhi = 0;
4984 add_double (xlo, xhi,
4985 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4986 &xlo, &xhi);
4989 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
4990 BITARRAY_SET (cases_seen, xlo);
4992 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4993 1, 0,
4994 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5000 /* Given a switch statement with an expression that is an enumeration
5001 type, warn if any of the enumeration type's literals are not
5002 covered by the case expressions of the switch. Also, warn if there
5003 are any extra switch cases that are *not* elements of the
5004 enumerated type.
5006 Historical note:
5008 At one stage this function would: ``If all enumeration literals
5009 were covered by the case expressions, turn one of the expressions
5010 into the default expression since it should not be possible to fall
5011 through such a switch.''
5013 That code has since been removed as: ``This optimization is
5014 disabled because it causes valid programs to fail. ANSI C does not
5015 guarantee that an expression with enum type will have a value that
5016 is the same as one of the enumeration literals.'' */
5018 void
5019 check_for_full_enumeration_handling (type)
5020 tree type;
5022 struct case_node *n;
5023 tree chain;
5025 /* The sparseness of the selector values, as computed by all_cases_count. */
5026 int sparseness = 0;
5028 /* The number of possible selector values. */
5029 HOST_WIDE_INT size;
5031 /* For each possible selector value, a one iff it has been matched
5032 by a case value alternative. */
5033 unsigned char *cases_seen;
5035 /* The allocated size of cases_seen, in chars. */
5036 HOST_WIDE_INT bytes_needed;
5038 size = all_cases_count (type, &sparseness);
5039 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5041 if (size > 0 && size < 600000
5042 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5043 this optimization if we don't have enough memory rather than
5044 aborting, as xmalloc would do. */
5045 && (cases_seen =
5046 (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
5048 HOST_WIDE_INT i;
5049 tree v = TYPE_VALUES (type);
5051 /* The time complexity of this code is normally O(N), where
5052 N is the number of members in the enumerated type.
5053 However, if type is an ENUMERAL_TYPE whose values do not
5054 increase monotonically, O(N*log(N)) time may be needed. */
5056 mark_seen_cases (type, cases_seen, size, sparseness);
5058 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5059 if (BITARRAY_TEST (cases_seen, i) == 0)
5060 warning ("enumeration value `%s' not handled in switch",
5061 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5063 free (cases_seen);
5066 /* Now we go the other way around; we warn if there are case
5067 expressions that don't correspond to enumerators. This can
5068 occur since C and C++ don't enforce type-checking of
5069 assignments to enumeration variables. */
5071 if (case_stack->data.case_stmt.case_list
5072 && case_stack->data.case_stmt.case_list->left)
5073 case_stack->data.case_stmt.case_list
5074 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5075 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5077 for (chain = TYPE_VALUES (type);
5078 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5079 chain = TREE_CHAIN (chain))
5082 if (!chain)
5084 if (TYPE_NAME (type) == 0)
5085 warning ("case value `%ld' not in enumerated type",
5086 (long) TREE_INT_CST_LOW (n->low));
5087 else
5088 warning ("case value `%ld' not in enumerated type `%s'",
5089 (long) TREE_INT_CST_LOW (n->low),
5090 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5091 == IDENTIFIER_NODE)
5092 ? TYPE_NAME (type)
5093 : DECL_NAME (TYPE_NAME (type))));
5095 if (!tree_int_cst_equal (n->low, n->high))
5097 for (chain = TYPE_VALUES (type);
5098 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5099 chain = TREE_CHAIN (chain))
5102 if (!chain)
5104 if (TYPE_NAME (type) == 0)
5105 warning ("case value `%ld' not in enumerated type",
5106 (long) TREE_INT_CST_LOW (n->high));
5107 else
5108 warning ("case value `%ld' not in enumerated type `%s'",
5109 (long) TREE_INT_CST_LOW (n->high),
5110 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5111 == IDENTIFIER_NODE)
5112 ? TYPE_NAME (type)
5113 : DECL_NAME (TYPE_NAME (type))));
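/* Illustration, not from the original source: the coverage half of
   the check above restated over plain arrays -- count enumerators
   not matched by any case value, assuming both arrays are sorted.
   Compiled out:  */
#if 0
static int
enum_coverage_example (const int *literals, int n_literals,
                       const int *cases, int n_cases)
{
  int i, j = 0, missing = 0;

  for (i = 0; i < n_literals; i++)
    {
      while (j < n_cases && cases[j] < literals[i])
        j++;                               /* skip smaller case values */
      if (j >= n_cases || cases[j] != literals[i])
        missing++;                         /* enumerator not handled */
    }
  return missing;
}
#endif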
5121 /* Terminate a case (Pascal) or switch (C) statement
5122 in which ORIG_INDEX is the expression to be tested.
5123 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
5124 type as given in the source before any compiler conversions.
5125 Generate the code to test it and jump to the right place. */
5127 void
5128 expand_end_case_type (orig_index, orig_type)
5129 tree orig_index, orig_type;
5131 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5132 rtx default_label = 0;
5133 struct case_node *n;
5134 unsigned int count;
5135 rtx index;
5136 rtx table_label;
5137 int ncases;
5138 rtx *labelvec;
5139 int i;
5140 rtx before_case, end;
5141 struct nesting *thiscase = case_stack;
5142 tree index_expr, index_type;
5143 int unsignedp;
5145 /* Don't crash due to previous errors. */
5146 if (thiscase == NULL)
5147 return;
5149 table_label = gen_label_rtx ();
5150 index_expr = thiscase->data.case_stmt.index_expr;
5151 index_type = TREE_TYPE (index_expr);
5152 unsignedp = TREE_UNSIGNED (index_type);
5153 if (orig_type == NULL)
5154 orig_type = TREE_TYPE (orig_index);
5156 do_pending_stack_adjust ();
5158 /* This might get a spurious warning in the presence of a syntax error;
5159 it could be fixed by moving the call to check_seenlabel after the
5160 check for error_mark_node, and copying the code of check_seenlabel that
5161 deals with case_stack->data.case_stmt.line_number_status /
5162 restore_line_number_status in front of the call to end_cleanup_deferral;
5163 however, this might miss some useful warnings in the presence of
5164 non-syntax errors. */
5165 check_seenlabel ();
5167 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5168 if (index_type != error_mark_node)
5170 /* If the switch expression was an enumerated type, check that
5171 exactly all enumeration literals are covered by the cases.
5172 The check is made when -Wswitch was specified and there is no
5173 default case, or when -Wswitch-enum was specified. */
5174 if (((warn_switch && !thiscase->data.case_stmt.default_label)
5175 || warn_switch_enum)
5176 && TREE_CODE (orig_type) == ENUMERAL_TYPE
5177 && TREE_CODE (index_expr) != INTEGER_CST)
5178 check_for_full_enumeration_handling (orig_type);
5180 if (warn_switch_default && !thiscase->data.case_stmt.default_label)
5181 warning ("switch missing default case");
5183 /* If we don't have a default-label, create one here,
5184 after the body of the switch. */
5185 if (thiscase->data.case_stmt.default_label == 0)
5187 thiscase->data.case_stmt.default_label
5188 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5189 expand_label (thiscase->data.case_stmt.default_label);
5191 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5193 before_case = get_last_insn ();
5195 if (thiscase->data.case_stmt.case_list
5196 && thiscase->data.case_stmt.case_list->left)
5197 thiscase->data.case_stmt.case_list
5198 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5200 /* Simplify the case-list before we count it. */
5201 group_case_nodes (thiscase->data.case_stmt.case_list);
5203 /* Get upper and lower bounds of case values.
5204 Also convert all the case values to the index expr's data type. */
5206 count = 0;
5207 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5209 /* Check low and high label values are integers. */
5210 if (TREE_CODE (n->low) != INTEGER_CST)
5211 abort ();
5212 if (TREE_CODE (n->high) != INTEGER_CST)
5213 abort ();
5215 n->low = convert (index_type, n->low);
5216 n->high = convert (index_type, n->high);
5218 /* Count the elements and track the largest and smallest
5219 of them (treating them as signed even if they are not). */
5220 if (count++ == 0)
5222 minval = n->low;
5223 maxval = n->high;
5225 else
5227 if (INT_CST_LT (n->low, minval))
5228 minval = n->low;
5229 if (INT_CST_LT (maxval, n->high))
5230 maxval = n->high;
5232 /* A range counts double, since it requires two compares. */
5233 if (! tree_int_cst_equal (n->low, n->high))
5234 count++;
5237 /* Compute span of values. */
5238 if (count != 0)
5239 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5241 end_cleanup_deferral ();
5243 if (count == 0)
5245 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5246 emit_queue ();
5247 emit_jump (default_label);
5250 /* If range of values is much bigger than number of values,
5251 make a sequence of conditional branches instead of a dispatch.
5252 If the switch-index is a constant, do it this way
5253 because we can optimize it. */
5255 else if (count < case_values_threshold ()
5256 || compare_tree_int (range, 10 * count) > 0
5257 /* RANGE may be signed, and really large ranges will show up
5258 as negative numbers. */
5259 || compare_tree_int (range, 0) < 0
5260 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5261 || flag_pic
5262 #endif
5263 || TREE_CODE (index_expr) == INTEGER_CST
5264 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5265 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5267 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5269 /* If the index is a short or char for which we do not have
5270 an insn to handle comparisons directly, convert it to
5271 a full integer now, rather than letting each comparison
5272 generate the conversion. */
5274 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5275 && ! have_insn_for (COMPARE, GET_MODE (index)))
5277 enum machine_mode wider_mode;
5278 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5279 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5280 if (have_insn_for (COMPARE, wider_mode))
5282 index = convert_to_mode (wider_mode, index, unsignedp);
5283 break;
5287 emit_queue ();
5288 do_pending_stack_adjust ();
5290 index = protect_from_queue (index, 0);
5291 if (GET_CODE (index) == MEM)
5292 index = copy_to_reg (index);
5293 if (GET_CODE (index) == CONST_INT
5294 || TREE_CODE (index_expr) == INTEGER_CST)
5296 /* Make a tree node with the proper constant value
5297 if we don't already have one. */
5298 if (TREE_CODE (index_expr) != INTEGER_CST)
5300 index_expr
5301 = build_int_2 (INTVAL (index),
5302 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5303 index_expr = convert (index_type, index_expr);
5306 /* For constant index expressions we need only
5307 issue an unconditional branch to the appropriate
5308 target code. The job of removing any unreachable
5309 code is left to the optimization phase if the
5310 "-O" option is specified. */
5311 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5312 if (! tree_int_cst_lt (index_expr, n->low)
5313 && ! tree_int_cst_lt (n->high, index_expr))
5314 break;
5316 if (n)
5317 emit_jump (label_rtx (n->code_label));
5318 else
5319 emit_jump (default_label);
5321 else
5323 /* If the index expression is not constant we generate
5324 a binary decision tree to select the appropriate
5325 target code. This is done as follows:
5327 The list of cases is rearranged into a binary tree,
5328 nearly optimal assuming equal probability for each case.
5330 The tree is transformed into RTL, eliminating
5331 redundant test conditions at the same time.
5333 If program flow could reach the end of the
5334 decision tree an unconditional jump to the
5335 default code is emitted. */
5337 use_cost_table
5338 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
5339 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5340 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5341 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5342 default_label, index_type);
5343 emit_jump_if_reachable (default_label);
5346 else
5348 if (! try_casesi (index_type, index_expr, minval, range,
5349 table_label, default_label))
5351 index_type = thiscase->data.case_stmt.nominal_type;
5353 /* Index jumptables from zero for suitable values of
5354 minval to avoid a subtraction. */
5355 if (! optimize_size
5356 && compare_tree_int (minval, 0) > 0
5357 && compare_tree_int (minval, 3) < 0)
5359 minval = integer_zero_node;
5360 range = maxval;
5363 if (! try_tablejump (index_type, index_expr, minval, range,
5364 table_label, default_label))
5365 abort ();
5368 /* Get table of labels to jump to, in order of case index. */
5370 ncases = tree_low_cst (range, 0) + 1;
5371 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5372 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5374 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5376 /* Compute the low and high bounds relative to the minimum
5377 value since that should fit in a HOST_WIDE_INT while the
5378 actual values may not. */
5379 HOST_WIDE_INT i_low
5380 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5381 n->low, minval)), 1);
5382 HOST_WIDE_INT i_high
5383 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5384 n->high, minval)), 1);
5385 HOST_WIDE_INT i;
5387 for (i = i_low; i <= i_high; i ++)
5388 labelvec[i]
5389 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5392 /* Fill in the gaps with the default. */
5393 for (i = 0; i < ncases; i++)
5394 if (labelvec[i] == 0)
5395 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5397 /* Output the table. */
5398 emit_label (table_label);
5400 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5401 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5402 gen_rtx_LABEL_REF (Pmode, table_label),
5403 gen_rtvec_v (ncases, labelvec),
5404 const0_rtx, const0_rtx));
5405 else
5406 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5407 gen_rtvec_v (ncases, labelvec)));
5409 /* If the case insn drops through the table,
5410 after the table we must jump to the default-label.
5411 Otherwise record no drop-through after the table. */
5412 #ifdef CASE_DROPS_THROUGH
5413 emit_jump (default_label);
5414 #else
5415 emit_barrier ();
5416 #endif
5419 before_case = NEXT_INSN (before_case);
5420 end = get_last_insn ();
5421 if (squeeze_notes (&before_case, &end))
5422 abort ();
5423 reorder_insns (before_case, end,
5424 thiscase->data.case_stmt.start);
5426 else
5427 end_cleanup_deferral ();
5429 if (thiscase->exit_label)
5430 emit_label (thiscase->exit_label);
5432 POPSTACK (case_stack);
5434 free_temp_slots ();
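/* Illustration, not from the original source: the table-versus-tree
   decision above reduced to its core heuristic, with the target's
   case_values_threshold () stubbed as a parameter.  Compiled out:  */
#if 0
static int
use_jump_table_example (unsigned int count, HOST_WIDE_INT range,
                        unsigned int threshold)
{
  if (count < threshold)          /* too few labels for a table to pay */
    return 0;
  if (range < 0)                  /* range overflowed: really large */
    return 0;
  if (range > 10 * (HOST_WIDE_INT) count)
    return 0;                     /* table would be mostly defaults */
  return 1;                       /* dense enough: emit a tablejump */
}
#endif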
5437 /* Convert the tree NODE into a list linked by the right field, with the left
5438 field zeroed. RIGHT is used for recursion; it is a list to be placed
5439 rightmost in the resulting list. */
5441 static struct case_node *
5442 case_tree2list (node, right)
5443 struct case_node *node, *right;
5445 struct case_node *left;
5447 if (node->right)
5448 right = case_tree2list (node->right, right);
5450 node->right = right;
5451 if ((left = node->left))
5453 node->left = 0;
5454 return case_tree2list (left, node);
5457 return node;
5460 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5462 static void
5463 do_jump_if_equal (op1, op2, label, unsignedp)
5464 rtx op1, op2, label;
5465 int unsignedp;
5467 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5469 if (INTVAL (op1) == INTVAL (op2))
5470 emit_jump (label);
5472 else
5473 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5474 (GET_MODE (op1) == VOIDmode
5475 ? GET_MODE (op2) : GET_MODE (op1)),
5476 unsignedp, label);
5479 /* Not all case values are encountered equally. This function
5480 uses a heuristic to weight case labels, in cases where that
5481 looks like a reasonable thing to do.
5483 Right now, all we try to guess is text, and we establish the
5484 following weights:
5486 chars above space: 16
5487 digits: 16
5488 default: 12
5489 space, punct: 8
5490 tab: 4
5491 newline: 2
5492 other "\" chars: 1
5493 remaining chars: 0
5495 If we find any cases in the switch that are not either -1 or in the range
5496 of valid ASCII characters, or are control characters other than those
5497 commonly used with "\", don't treat this switch scanning text.
5499 Return 1 if these nodes are suitable for cost estimation, otherwise
5500 return 0. */
5502 static int
5503 estimate_case_costs (node)
5504 case_node_ptr node;
5506 tree min_ascii = integer_minus_one_node;
5507 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5508 case_node_ptr n;
5509 int i;
5511 /* If we haven't already made the cost table, make it now. Note that the
5512 lower bound of the table is -1, not zero. */
5514 if (! cost_table_initialized)
5516 cost_table_initialized = 1;
5518 for (i = 0; i < 128; i++)
5520 if (ISALNUM (i))
5521 COST_TABLE (i) = 16;
5522 else if (ISPUNCT (i))
5523 COST_TABLE (i) = 8;
5524 else if (ISCNTRL (i))
5525 COST_TABLE (i) = -1;
5528 COST_TABLE (' ') = 8;
5529 COST_TABLE ('\t') = 4;
5530 COST_TABLE ('\0') = 4;
5531 COST_TABLE ('\n') = 2;
5532 COST_TABLE ('\f') = 1;
5533 COST_TABLE ('\v') = 1;
5534 COST_TABLE ('\b') = 1;
5537 /* See if all the case expressions look like text. It is text if the
5538 lowest constant is >= -1 and the highest constant is <= 127. Do all comparisons
5539 as signed arithmetic since we don't want to ever access cost_table with a
5540 value less than -1. Also check that none of the constants in a range
5541 are strange control characters. */
5543 for (n = node; n; n = n->right)
5545 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5546 return 0;
5548 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5549 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5550 if (COST_TABLE (i) < 0)
5551 return 0;
5554 /* All interesting values are within the range of interesting
5555 ASCII characters. */
5556 return 1;
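/* Illustration, not from the original source: the initialization
   above restated over a plain array, with index -1 mapped to slot 0
   as the table's documented lower bound requires.  Compiled out:  */
#if 0
static void
weight_table_example (int table[129])
{
  int *w = table + 1;             /* slot 0 holds the weight of -1 */
  int i;

  memset (table, 0, 129 * sizeof (int));
  for (i = 0; i < 128; i++)
    if (ISALNUM (i))
      w[i] = 16;                  /* letters and digits */
    else if (ISPUNCT (i))
      w[i] = 8;                   /* punctuation */
    else if (ISCNTRL (i))
      w[i] = -1;                  /* strange control char: not text */

  w[' '] = 8;
  w['\t'] = 4;
  w['\0'] = 4;
  w['\n'] = 2;
  w['\f'] = w['\v'] = w['\b'] = 1;
}
#endif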
5559 /* Scan an ordered list of case nodes
5560 combining those with consecutive values or ranges.
5562 E.g., three separate entries 1: 2: 3: become one entry 1..3: */
5564 static void
5565 group_case_nodes (head)
5566 case_node_ptr head;
5568 case_node_ptr node = head;
5570 while (node)
5572 rtx lb = next_real_insn (label_rtx (node->code_label));
5573 rtx lb2;
5574 case_node_ptr np = node;
5576 /* Try to group the successors of NODE with NODE. */
5577 while (((np = np->right) != 0)
5578 /* Do they jump to the same place? */
5579 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5580 || (lb != 0 && lb2 != 0
5581 && simplejump_p (lb)
5582 && simplejump_p (lb2)
5583 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5584 SET_SRC (PATTERN (lb2)))))
5585 /* Are their ranges consecutive? */
5586 && tree_int_cst_equal (np->low,
5587 fold (build (PLUS_EXPR,
5588 TREE_TYPE (node->high),
5589 node->high,
5590 integer_one_node)))
5591 /* An overflow is not consecutive. */
5592 && tree_int_cst_lt (node->high,
5593 fold (build (PLUS_EXPR,
5594 TREE_TYPE (node->high),
5595 node->high,
5596 integer_one_node))))
5598 node->high = np->high;
5600 /* NP is the first node after NODE which can't be grouped with it.
5601 Delete the nodes in between, and move on to that node. */
5602 node->right = np;
5603 node = np;
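
/* Note (editorial, not from the original sources): the overflow test
   above matters when NODE->high is the largest value of its type.  Then
   NODE->high + 1 wraps around, tree_int_cst_lt reports it as not greater
   than NODE->high, and a following label that happens to hold the
   wrapped value is correctly left ungrouped.  */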

/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
        {
          if (!tree_int_cst_equal (np->low, np->high))
            {
              ranges++;
              if (use_cost_table)
                cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
            }

          if (use_cost_table)
            cost += COST_TABLE (TREE_INT_CST_LOW (np->low));

          i++;
          np = np->right;
        }

      if (i > 2)
        {
          /* Split this list if it is long enough for that to help.  */
          npp = head;
          left = *npp;
          if (use_cost_table)
            {
              /* Find the place in the list that bisects the list's total
                 cost.  Here I gets half the total cost.  */
              int n_moved = 0;
              i = (cost + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
                  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                  n_moved += 1;
                }
              if (n_moved == 0)
                {
                  /* Leave this branch lopsided, but optimize left-hand
                     side and fill in `parent' fields for right-hand side.  */
                  np = *head;
                  np->parent = parent;
                  balance_case_nodes (&np->left, np);
                  for (; np->right; np = np->right)
                    np->right->parent = np;
                  return;
                }
            }
          /* If there are just three nodes, split at the middle one.  */
          else if (i == 3)
            npp = &(*npp)->right;
          else
            {
              /* Find the place in the list that bisects the list's total
                 cost, where ranges count as 2.
                 Here I gets half the total cost.  */
              i = (i + ranges + 1) / 2;
              while (1)
                {
                  /* Skip nodes while their cost does not reach that amount.  */
                  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
                    i--;
                  i--;
                  if (i <= 0)
                    break;
                  npp = &(*npp)->right;
                }
            }
          *head = np = *npp;
          *npp = 0;
          np->parent = parent;
          np->left = left;

          /* Optimize each of the two split parts.  */
          balance_case_nodes (&np->left, np);
          balance_case_nodes (&np->right, np);
        }
      else
        {
          /* Else leave this branch as one level,
             but fill in `parent' fields.  */
          np = *head;
          np->parent = parent;
          for (; np->right; np = np->right)
            np->right->parent = np;
        }
    }
}
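
/* Worked example (editorial, not from the original sources): for five
   equally likely single-value labels 1: 4: 7: 9: 11: with the cost table
   unused, I counts 5 nodes and RANGES is 0, so the split point is
   (5 + 0 + 1) / 2 = 3 and the third node becomes the pivot.  The result
   is a tree rooted at 7, with {1, 4} balanced recursively on the left
   and {9, 11} on the right.  */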

/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed, so it is
   known that if a parent node has already tested against the value
   one below the current node's low bound, the current node is
   bounded at its lower end; the test would be redundant.  */

static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
                               node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}
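
/* Example (editorial, not from the original sources): if a parent node
   tested for the value 50 and the current node hangs on that parent's
   high side, then a node whose low bound is 51 needs no lower-bound
   check: the loop above finds the parent whose HIGH equals 51 - 1 and
   returns 1.  */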

/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed, so it is
   known that if a parent node has already tested against the value
   one above the current node's high bound, the current node is
   bounded at its upper end; the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If there is no upper bound, obviously no test is needed.  */

  if (TYPE_MAX_VALUE (index_type) == NULL)
    return 1;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
                               node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}
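
/* Example (editorial, not from the original sources): symmetrically, if
   a parent node tested for the value 52 and the current node hangs on
   that parent's low side, then a node whose high bound is 51 needs no
   upper-bound check: the loop above finds the parent whose LOW equals
   51 + 1 and returns 1.  */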

/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
          && node_has_high_bound (node, index_type));
}

/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  /* A BARRIER follows every unconditional jump, so if the last emitted
     insn is a barrier this point is unreachable and the jump is dead.  */
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}

/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out-of-bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */
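
/* Sketch (editorial, not from the original sources): for the balanced
   tree built earlier from the labels 1: 4: 7: 9: 11:, the code emitted
   for the root, when no bound is known, resembles

        if (index == 7) goto L7;
        if (index > 7) goto test_right;
        ... tests for 1 and 4 ...
        goto default;
      test_right:
        ... tests for 9 and 11 ...

   with node_has_low_bound/node_has_high_bound pruning the comparisons
   that a parent has already made redundant.  */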

static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  enum machine_mode mode = GET_MODE (index);
  enum machine_mode imode = TYPE_MODE (index_type);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
         this node and then check our children, if any.  */

      do_jump_if_equal (index,
                        convert_modes (mode, imode,
                                       expand_expr (node->low, NULL_RTX,
                                                    VOIDmode, 0),
                                       unsignedp),
                        label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
        {
          /* This node has children on both sides.
             Dispatch to one side or the other
             by comparing the index value with this node's value.
             If one subtree is bounded, check that one first,
             so we can avoid real branches in the tree.  */

          if (node_is_bounded (node->right, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (node->right->code_label));
              emit_case_nodes (index, node->left, default_label, index_type);
            }

          else if (node_is_bounded (node->left, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       LT, NULL_RTX, mode, unsignedp,
                                       label_rtx (node->left->code_label));
              emit_case_nodes (index, node->right, default_label, index_type);
            }

          else
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

              /* See if the value is on the right.  */
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (test_label));

              /* Value must be on the left.
                 Handle the left-hand subtree.  */
              emit_case_nodes (index, node->left, default_label, index_type);
              /* If left-hand subtree does nothing,
                 go to default.  */
              emit_jump_if_reachable (default_label);

              /* Code branches here for the right-hand subtree.  */
              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Here we have a right child but no left so we issue a conditional
             branch to default and process the right child.

             Omit the conditional branch to default if it would protect only
             a single-valued right child; it costs too much space to save so
             little time.  */

          if (node->right->right || node->right->left
              || !tree_int_cst_equal (node->right->low, node->right->high))
            {
              if (!node_has_low_bound (node, index_type))
                {
                  emit_cmp_and_jump_insns (index,
                                           convert_modes
                                           (mode, imode,
                                            expand_expr (node->high, NULL_RTX,
                                                         VOIDmode, 0),
                                            unsignedp),
                                           LT, NULL_RTX, mode, unsignedp,
                                           default_label);
                }

              emit_case_nodes (index, node->right, default_label, index_type);
            }
          else
            /* We cannot process node->right normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->right explicitly.  */
            do_jump_if_equal (index,
                              convert_modes
                              (mode, imode,
                               expand_expr (node->right->low, NULL_RTX,
                                            VOIDmode, 0),
                               unsignedp),
                              label_rtx (node->right->code_label), unsignedp);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Just one subtree, on the left.  */
          if (node->left->left || node->left->right
              || !tree_int_cst_equal (node->left->low, node->left->high))
            {
              if (!node_has_high_bound (node, index_type))
                {
                  emit_cmp_and_jump_insns (index,
                                           convert_modes
                                           (mode, imode,
                                            expand_expr (node->high, NULL_RTX,
                                                         VOIDmode, 0),
                                            unsignedp),
                                           GT, NULL_RTX, mode, unsignedp,
                                           default_label);
                }

              emit_case_nodes (index, node->left, default_label, index_type);
            }
          else
            /* We cannot process node->left normally
               since we haven't ruled out the numbers greater than
               this node's value.  So handle node->left explicitly.  */
            do_jump_if_equal (index,
                              convert_modes
                              (mode, imode,
                               expand_expr (node->left->low, NULL_RTX,
                                            VOIDmode, 0),
                               unsignedp),
                              label_rtx (node->left->code_label), unsignedp);
        }
    }

  else
    {
      /* Node is a range.  These cases are very similar to those for a single
         value, except that we do not start by testing whether this node
         is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
        {
          /* Node has subtrees on both sides.
             If the right-hand subtree is bounded,
             test for it first, since we can go straight there.
             Otherwise, we need to make a branch in the control structure,
             then handle the two subtrees.  */
          tree test_label = 0;

          if (node_is_bounded (node->right, index_type))
            /* Right hand node is fully bounded so we can eliminate any
               testing and branch directly to the target code.  */
            emit_cmp_and_jump_insns (index,
                                     convert_modes
                                     (mode, imode,
                                      expand_expr (node->high, NULL_RTX,
                                                   VOIDmode, 0),
                                      unsignedp),
                                     GT, NULL_RTX, mode, unsignedp,
                                     label_rtx (node->right->code_label));
          else
            {
              /* Right hand node requires testing.
                 Branch to a label where we will handle it later.  */

              test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (test_label));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                                    unsignedp),
                                   GE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label));

          /* Handle the left-hand subtree.  */
          emit_case_nodes (index, node->left, default_label, index_type);

          /* If right node had to be handled later, do that now.  */

          if (test_label)
            {
              /* If the left-hand subtree fell through,
                 don't let it fall into the right-hand subtree.  */
              emit_jump_if_reachable (default_label);

              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Deal with values to the left of this node,
             if they are possible.  */
          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->low, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       LT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }

          /* Value belongs to this node or to the right-hand subtree.  */

          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                                    unsignedp),
                                   LE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label));

          emit_case_nodes (index, node->right, default_label, index_type);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Deal with values to the right of this node,
             if they are possible.  */
          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                                    unsignedp),
                                   GE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label));

          emit_case_nodes (index, node->left, default_label, index_type);
        }

      else
        {
          /* Node has no children so we check low and high bounds to remove
             redundant tests.  Only one of the bounds can exist,
             since otherwise this node is bounded--a case tested already.  */
          int high_bound = node_has_high_bound (node, index_type);
          int low_bound = node_has_low_bound (node, index_type);

          if (!high_bound && low_bound)
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }

          else if (!low_bound && high_bound)
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->low, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       LT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }
          else if (!low_bound && !high_bound)
            {
              /* Widen LOW and HIGH to the same width as INDEX.  */
              tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
              tree low = build1 (CONVERT_EXPR, type, node->low);
              tree high = build1 (CONVERT_EXPR, type, node->high);
              rtx low_rtx, new_index, new_bound;

              /* Instead of doing two branches, emit one unsigned branch for
                 (index-low) > (high-low).  */
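              /* Worked instance (editorial, not from the original
                 sources): for a range label 3 ... 7, INDEX lies in the
                 range exactly when the unsigned value (index - 3) is at
                 most 7 - 3 = 4; any index below 3 wraps to a huge
                 unsigned value, so the single unsigned GT branch below
                 replaces separate < 3 and > 7 tests.  */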
              low_rtx = expand_expr (low, NULL_RTX, mode, 0);
              new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
                                               NULL_RTX, unsignedp,
                                               OPTAB_WIDEN);
              new_bound = expand_expr (fold (build (MINUS_EXPR, type,
                                                    high, low)),
                                       NULL_RTX, mode, 0);

              emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
                                       mode, 1, default_label);
            }

          emit_jump (label_rtx (node->code_label));
        }
    }
}

#include "gt-stmt.h"